diff --git a/.gitignore b/.gitignore index e301a9d6b7..53f6eb72ed 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ Products/ZenUI3/node_modules dist build Zenoss.egg-info -install-zenoss.mk lib/python2.7/site-packages/Zenoss-nspkg.pth lib/python2.7/site-packages/Zenoss.egg-link diff --git a/Products/DataCollector/ApplyDataMap/datamaputils.py b/Products/DataCollector/ApplyDataMap/datamaputils.py index 232c6bc844..d3f8a52ab7 100644 --- a/Products/DataCollector/ApplyDataMap/datamaputils.py +++ b/Products/DataCollector/ApplyDataMap/datamaputils.py @@ -10,7 +10,6 @@ import logging import sys -from six import string_types from zope.event import notify from Products.DataCollector.plugins.DataMaps import MultiArgs @@ -155,7 +154,7 @@ def _get_attr_value(obj, attr): def _sanitize_value(value, obj): - if isinstance(value, string_types): + if isinstance(value, basestring): try: return _decode_value(value, obj) except UnicodeDecodeError: diff --git a/Products/DataCollector/ApplyDataMap/incrementalupdate.py b/Products/DataCollector/ApplyDataMap/incrementalupdate.py index 993d81abf1..513306f4ab 100644 --- a/Products/DataCollector/ApplyDataMap/incrementalupdate.py +++ b/Products/DataCollector/ApplyDataMap/incrementalupdate.py @@ -279,7 +279,7 @@ def _add(self): for objId, obj in self.relationship.objectItemsAll() if objId == self._target_id ), - _NOTSET + _NOTSET, ) if self._target is _NOTSET: changed = True diff --git a/Products/DataCollector/ApplyDataMap/tests/test_add_directive.py b/Products/DataCollector/ApplyDataMap/tests/test_add_directive.py index eb280b30f2..52c0ab4c93 100644 --- a/Products/DataCollector/ApplyDataMap/tests/test_add_directive.py +++ b/Products/DataCollector/ApplyDataMap/tests/test_add_directive.py @@ -13,12 +13,11 @@ from ..applydatamap import ApplyDataMap, ObjectMap -PATH = {'src': 'Products.DataCollector.ApplyDataMap.applydatamap'} +PATH = {"src": "Products.DataCollector.ApplyDataMap.applydatamap"} class TestImplicitAdd(BaseTestCase): - """Test ApplyDataMap directives. - """ + """Test ApplyDataMap directives.""" def afterSetUp(t): super(TestImplicitAdd, t).afterSetUp() @@ -52,7 +51,6 @@ def test_implicit_update_twice_with_same_data(t): class TestExplicitAdd(BaseTestCase): - def afterSetUp(t): super(TestExplicitAdd, t).afterSetUp() t.om1 = ObjectMap( @@ -125,8 +123,7 @@ def test_explicit_add_false_with_existing_device_with_changes(t): class TestAddSequence(BaseTestCase): - """Test ApplyDataMap directives. 
- """ + """Test ApplyDataMap directives.""" def afterSetUp(t): super(TestAddSequence, t).afterSetUp() diff --git a/Products/DataCollector/ApplyDataMap/tests/test_datamaputils.py b/Products/DataCollector/ApplyDataMap/tests/test_datamaputils.py index 600f0b989d..e82783fd4f 100644 --- a/Products/DataCollector/ApplyDataMap/tests/test_datamaputils.py +++ b/Products/DataCollector/ApplyDataMap/tests/test_datamaputils.py @@ -8,6 +8,7 @@ ############################################################################## from base64 import b64encode + from mock import Mock, sentinel, patch from Products.DataCollector.plugins.DataMaps import ObjectMap diff --git a/Products/DataCollector/ApplyDataMap/tests/test_incrementalupdate.py b/Products/DataCollector/ApplyDataMap/tests/test_incrementalupdate.py index 52dbfd144b..db4155e938 100644 --- a/Products/DataCollector/ApplyDataMap/tests/test_incrementalupdate.py +++ b/Products/DataCollector/ApplyDataMap/tests/test_incrementalupdate.py @@ -60,7 +60,11 @@ def setup_mock_environment(t): t.relationship = Mock( name="relationship", spec_set=[ - t.target.id, "_getOb", "hasobject", "_setObject", "objectItemsAll" + t.target.id, + "_getOb", + "hasobject", + "_setObject", + "objectItemsAll", ], ) setattr(t.relationship, t.target.id, t.target) diff --git a/Products/DataCollector/ApplyDataMap/tests/utils.py b/Products/DataCollector/ApplyDataMap/tests/utils.py index e1de5f23d9..49ed59f2c3 100644 --- a/Products/DataCollector/ApplyDataMap/tests/utils.py +++ b/Products/DataCollector/ApplyDataMap/tests/utils.py @@ -13,7 +13,6 @@ class BaseTestCase(TestCase): - def setUp(t): logging.disable(logging.CRITICAL) diff --git a/Products/DataCollector/Plugins.py b/Products/DataCollector/Plugins.py index 0f211fac4a..0506300a92 100644 --- a/Products/DataCollector/Plugins.py +++ b/Products/DataCollector/Plugins.py @@ -287,9 +287,7 @@ def getPluginLoaders(self, packs): if modname not in self.loadedZenpacks: self.loadedZenpacks.append(modname) modPathPrefix = ".".join( - (modname,) - + self.packPath - + (self.lastModName,) + (modname,) + self.packPath + (self.lastModName,) ) factory = PackLoaderFactory(OsWalker(), modPathPrefix) package = pack.path(*self.packPath + (self.lastModName,)) diff --git a/Products/DataCollector/SnmpClient.py b/Products/DataCollector/SnmpClient.py index 7dfb6ff528..1696d2a4d0 100644 --- a/Products/DataCollector/SnmpClient.py +++ b/Products/DataCollector/SnmpClient.py @@ -15,11 +15,11 @@ from twisted.internet import reactor, error, defer from twisted.python import failure from twisted.internet.error import TimeoutError -from pynetsnmp.twistedsnmp import snmpprotocol, Snmpv3Error +from pynetsnmp.twistedsnmp import snmpprotocol, SnmpUsmError -from Products.ZenCollector.interfaces import IEventService from Products.ZenEvents import Event from Products.ZenEvents.ZenEventClasses import Status_Snmp +from Products.ZenHub.interfaces import IEventService from Products.ZenUtils.Driver import drive from Products.ZenUtils.snmp import ( SnmpAgentDiscoverer, @@ -48,15 +48,15 @@ def __init__( options=None, device=None, datacollector=None, - plugins=[], + plugins=None, ): - BaseClient.__init__(self, device, datacollector) + super(SnmpClient, self).__init__(device, datacollector) global defaultTries, defaultTimeout self.hostname = hostname self.device = device self.options = options self.datacollector = datacollector - self.plugins = plugins + self.plugins = plugins if plugins else [] self._getdata = {} self._tabledata = {} @@ -71,12 +71,16 @@ def initSnmpProxy(self): if self.proxy 
is not None: self.proxy.close() srcport = snmpprotocol.port() - self.proxy = self.connInfo.createSession(srcport.protocol) - self.proxy.open() + try: + self.proxy = self.connInfo.createSession(srcport.protocol) + self.proxy.open() + except Exception as ex: + log.error("failed to initialize SNMP session error=%s", ex) + self.proxy = None def run(self): """Start snmp collection.""" - log.debug("Starting %s", self.connInfo.summary()) + log.debug("starting %s", self.connInfo.summary()) self.initSnmpProxy() drive(self.doRun).addBoth(self.clientFinished) @@ -98,18 +102,20 @@ def checkCiscoChange(self, driver): result = False else: device.setLastPollSnmpUpTime(lastchange) - except Exception: - pass + except Exception as ex: + log.debug("failed to check Cisco change: %s", ex) yield defer.succeed(result) def doRun(self, driver): + if self.proxy is None: + return # test snmp connectivity log.debug("Testing SNMP configuration") yield self.proxy.walk(".1.3") try: driver.next() except TimeoutError: - log.info("Device timed out: %s", self.connInfo.summary()) + log.info("device timed out %s", self.connInfo.summary()) if self.options.discoverCommunity: yield self.findSnmpCommunity() snmp_config = driver.next() @@ -129,13 +135,14 @@ def doRun(self, driver): self.initSnmpProxy() else: raise - except Snmpv3Error: + except SnmpUsmError as ex: log.error( - "Cannot connect to SNMP agent: %s", self.connInfo.summary() + "cannot connect to SNMP agent error=%s %s", + ex, self.connInfo.summary() ) raise except Exception: - log.exception("Unable to talk: %s", self.connInfo.summary()) + log.exception("unable to talk %s", self.connInfo.summary()) raise changed = True @@ -266,16 +273,16 @@ def clientFinished(self, result): if isinstance(result.value, error.TimeoutError): log.error( - "Device %s timed out: are " "your SNMP settings correct?", + "device %s timed out: are your SNMP settings correct?", self.hostname, ) summary = "SNMP agent down - no response received" log.info("Sending event: %s", summary) - elif isinstance(result.value, Snmpv3Error): + elif isinstance(result.value, SnmpUsmError): log.error( - "Connection to device %s failed: %s", + "SNMP connection failed device=%s error=%s", self.hostname, - result.value.message, + result.value, ) summary = "SNMP v3 specific error during SNMP collection" else: @@ -286,13 +293,14 @@ def clientFinished(self, result): self._sendStatusEvent(summary, eventKey="agent_down") else: self._sendStatusEvent( - "SNMP agent up", eventKey="agent_down", severity=Event.Clear + "SNMP agent up", + eventKey="agent_down", + severity=Event.Clear, ) try: self.proxy.close() except AttributeError: - log.info("Caught AttributeError closing SNMP connection.") - """tell the datacollector that we are all done""" + log.info("caught AttributeError closing SNMP connection.") if self.datacollector: self.datacollector.clientFinished(self) else: diff --git a/Products/DataCollector/SshClient.py b/Products/DataCollector/SshClient.py index d1bb89fbec..0dd4de0cbc 100644 --- a/Products/DataCollector/SshClient.py +++ b/Products/DataCollector/SshClient.py @@ -33,8 +33,8 @@ from Products.DataCollector import CollectorClient from Products.DataCollector.Exceptions import LoginFailed -from Products.ZenCollector.interfaces import IEventService from Products.ZenEvents import Event +from Products.ZenHub.interfaces import IEventService from Products.ZenUtils.IpUtil import getHostByName from Products.ZenUtils.Utils import getExitMessage diff --git a/Products/DataCollector/plugins/CollectorPlugin.py 
b/Products/DataCollector/plugins/CollectorPlugin.py index 477224fc59..644345b96e 100644 --- a/Products/DataCollector/plugins/CollectorPlugin.py +++ b/Products/DataCollector/plugins/CollectorPlugin.py @@ -128,13 +128,13 @@ def copyDataToProxy(self, device, proxy): def asdate(self, val): """Convert a byte string to the date string 'YYYY/MM/DD HH:MM:SS'""" - datear = (1968, 1, 8, 10, 15, 00) + datear = (1968, 1, 8, 10, 15, 0) try: datear = struct.unpack("!h5B", val[0:7]) except Exception: pass if datear[0] == 0: - datear = (1968, 1, 8, 10, 15, 00) + datear = (1968, 1, 8, 10, 15, 0) return "%d/%02d/%02d %02d:%02d:%02d" % datear[:6] diff --git a/Products/DataCollector/plugins/zenoss/snmp/RouteMap.py b/Products/DataCollector/plugins/zenoss/snmp/RouteMap.py index ed4fd47103..a3a21d2d31 100644 --- a/Products/DataCollector/plugins/zenoss/snmp/RouteMap.py +++ b/Products/DataCollector/plugins/zenoss/snmp/RouteMap.py @@ -105,4 +105,6 @@ def mapSnmpVal(self, value, map): "bbnSpfIgrp", "ospf", "bgp", + "idpr", + "ciscoEigrp", ) diff --git a/Products/DataCollector/zendisc.py b/Products/DataCollector/zendisc.py index 7900a24a48..fa5911d2f6 100755 --- a/Products/DataCollector/zendisc.py +++ b/Products/DataCollector/zendisc.py @@ -21,6 +21,8 @@ from optparse import SUPPRESS_HELP +import six + from twisted.internet import defer from twisted.names.error import DNSNameError @@ -82,7 +84,7 @@ class ZenDisc(ZenModeler): """ initialServices = PBDaemon.initialServices + ["DiscoverService"] - name = "zendisc" + mname = name = "zendisc" scanned = 0 def __init__(self, single=True): @@ -138,7 +140,14 @@ def discoverIps(self, nets): ) continue self.log.info("Discover network '%s'", net.getNetworkName()) - results = yield self.pingMany(net.fullIpList()) + + full_ip_list = net.fullIpList() + if self.options.removeInterfaceIps: + full_ip_list = yield self.config().callRemote( + "removeInterfaces", net + ) + + results = yield self.pingMany(full_ip_list) goodips, badips = _partitionPingResults(results) self.log.debug( "Found %d good IPs and %d bad IPs", len(goodips), len(badips) @@ -162,7 +171,7 @@ def discoverRanges(self): back. 
""" iprange = self.options.range - if isinstance(iprange, basestring): + if isinstance(iprange, six.string_types): iprange = [iprange] # in case someone uses 10.0.0.0-5,192.168.0.1-5 instead of # --range 10.0.0.0-5 --range 192.168.0.1-5 @@ -177,8 +186,8 @@ def discoverRanges(self): self.log.debug( "Found %d good IPs and %d bad IPs", len(goodips), len(badips) ) - devices = yield self.discoverDevices(goodips) self.log.info("Discovered %d active IPs", len(goodips)) + devices = yield self.discoverDevices(goodips) defer.returnValue(devices) @defer.inlineCallbacks @@ -221,16 +230,16 @@ def sendDiscoveredEvent(self, ip, dev=None, sev=2): if dev: devname = dev.id msg = "Discovered device name '%s' for ip '%s'" % (devname, ip) - evt = dict( - device=devname, - ipAddress=ip, - eventKey=ip, - component=comp, - eventClass=Status_Snmp, - summary=msg, - severity=sev, - agent="Discover", - ) + evt = { + "device": devname, + "ipAddress": ip, + "eventKey": ip, + "component": comp, + "eventClass": Status_Snmp, + "summary": msg, + "severity": sev, + "agent": "Discover", + } self.sendEvent(evt) @defer.inlineCallbacks @@ -295,45 +304,44 @@ def findRemoteDeviceInfo(self, ip, devicePath, deviceSnmpCommunities=None): timeout, retries = snmp_conf["zSnmpTimeout"], snmp_conf["zSnmpTries"] if snmp_conf["zSnmpVer"] == SnmpV3Config.version: for port in ports: + engine = snmp_conf.get("zSnmpEngineId") + context = snmp_conf.get("zSnmpContext") + + common_params = { + "ip": ip, + "port": port, + "timeout": timeout, + "retries": retries, + "securityName": snmp_conf["zSnmpSecurityName"], + } + + if engine: + common_params["engine"] = engine + if context: + common_params["context"] = context + if snmp_conf["zSnmpPrivType"] and snmp_conf["zSnmpAuthType"]: - configs.append( - SnmpV3Config( - ip, - port=port, - timeout=timeout, - retries=retries, - weight=3, - securityName=snmp_conf["zSnmpSecurityName"], - authType=snmp_conf["zSnmpAuthType"], - authPassphrase=snmp_conf["zSnmpAuthPassword"], - privType=snmp_conf["zSnmpPrivType"], - privPassphrase=snmp_conf["zSnmpPrivPassword"], - ) - ) + params = common_params.copy() + params.update({ + "authType": snmp_conf["zSnmpAuthType"], + "authPassphrase": snmp_conf["zSnmpAuthPassword"], + "privType": snmp_conf["zSnmpPrivType"], + "privPassphrase": snmp_conf["zSnmpPrivPassword"], + "weight": 3, + }) + configs.append(SnmpV3Config(**params)) elif snmp_conf["zSnmpAuthType"]: - configs.append( - SnmpV3Config( - ip, - port=port, - timeout=timeout, - retries=retries, - weight=2, - securityName=snmp_conf["zSnmpSecurityName"], - authType=snmp_conf["zSnmpAuthType"], - authPassphrase=snmp_conf["zSnmpAuthPassword"], - ) - ) + params = common_params.copy() + params.update({ + "authType": snmp_conf["zSnmpAuthType"], + "authPassphrase": snmp_conf["zSnmpAuthPassword"], + "weight": 2, + }) + configs.append(SnmpV3Config(**params)) else: - configs.append( - SnmpV3Config( - ip, - port=port, - timeout=timeout, - retries=retries, - weight=1, - securityName=snmp_conf["zSnmpSecurityName"], - ) - ) + params = common_params.copy() + params["weight"] = 1 + configs.append(SnmpV3Config(**params)) else: self.log.debug("Override acquired community strings") # Use a default set of SNMP community strings if the device @@ -408,16 +416,16 @@ def discoverDevice( defer.returnValue(None) try: - kw = dict( - deviceName=ip, - discoverProto=None, - devicePath=devicepath, - performanceMonitor=self.options.monitor, - locationPath=self.options.location, - groupPaths=self.options.groups, - systemPaths=self.options.systems, - 
productionState=prodState, - ) + kw = { + "deviceName": ip, + "discoverProto": None, + "devicePath": devicepath, + "performanceMonitor": self.options.monitor, + "locationPath": self.options.location, + "groupPaths": self.options.groups, + "systemPaths": self.options.systems, + "productionState": prodState, + } # If zProperties are set via a job, get them and pass them in if self.options.job: @@ -542,18 +550,18 @@ def discoverDevice( self.log.exception(e) if self.options.snmpMissing: self.sendEvent( - dict( - device=ip, - component=ip, - ipAddress=ip, - eventKey=ip, - eventClass=Status_Snmp, - summary=str(e), - severity=Info, - agent="Discover", - ) + { + "device": ip, + "component": ip, + "ipAddress": ip, + "eventKey": ip, + "eventClass": Status_Snmp, + "summary": str(e), + "severity": Info, + "agent": "Discover", + } ) - except Exception as e: + except Exception: self.log.exception("Failed device discovery for '%s'", ip) finally: self.log.info("Finished scanning device with address %s", ip) @@ -570,7 +578,7 @@ def collectNet(self): """ network = self.options.net # net option from the config file is a string - if isinstance(network, basestring): + if isinstance(network, six.string_types): network = [network] # in case someone uses 10.0.0.0,192.168.0.1 instead of # --net 10.0.0.0 --net 192.168.0.1 @@ -872,6 +880,15 @@ def buildOptions(self): default=False, help="Prefer SNMP name to DNS name when modeling via SNMP.", ) + self.parser.add_option( + "--remove-interface-ips", + dest="removeInterfaceIps", + action="store_true", + default=False, + help="Skip discovery on IPs already assigned to interfaces " + "(device components).", + ) + # --job: a development-only option that jobs will use to communicate # their existence to zendisc. Not for users, so help is suppressed. self.parser.add_option("--job", dest="job", help=SUPPRESS_HELP) diff --git a/Products/DataCollector/zenmodeler.py b/Products/DataCollector/zenmodeler.py index 795c109397..55bbf293dc 100755 --- a/Products/DataCollector/zenmodeler.py +++ b/Products/DataCollector/zenmodeler.py @@ -23,7 +23,6 @@ except ImportError: USE_WMI = False -import collections import cPickle as pickle import gzip import os @@ -39,8 +38,8 @@ import zope.component from metrology import Metrology +from twisted.internet import reactor, defer from twisted.internet.defer import succeed -from twisted.internet import reactor from twisted.python.failure import Failure from Products.DataCollector import Classifier @@ -54,13 +53,11 @@ ) from Products.ZenCollector.cyberark import get_cyberark from Products.ZenCollector.daemon import parseWorkerOptions, addWorkerOptions -from Products.ZenCollector.interfaces import IEventService from Products.ZenEvents.ZenEventClasses import Heartbeat, Error +from Products.ZenHub.interfaces import IEventService from Products.ZenHub.PBDaemon import FakeRemote, PBDaemon, HubDown -from Products.ZenUtils.DaemonStats import DaemonStats from Products.ZenUtils.Driver import drive, driveLater -from Products.ZenUtils.metricwriter import ThresholdNotifier -from Products.ZenUtils.Utils import unused, zenPath +from Products.ZenUtils.Utils import unused, zenPath, wait from Products.Zuul.utils import safe_hasattr as hasattr # needed for Twisted's PB (Perspective Broker) to work @@ -89,7 +86,7 @@ class ZenModeler(PBDaemon): metrics. 
""" - name = "zenmodeler" + mname = name = "zenmodeler" initialServices = PBDaemon.initialServices + ["ModelerService"] generateEvents = True @@ -104,15 +101,12 @@ def __init__(self, single=False): @param single: collect from a single device? @type single: boolean """ - PBDaemon.__init__(self) + super(ZenModeler, self).__init__() # FIXME: cleanup --force option #2660 self.options.force = True self.start = None self.startat = None - self.rrdStats = DaemonStats() - self.single = single - if self.options.device: - self.single = True + self.single = single if not self.options.device else True self.modelerCycleInterval = self.options.cycletime # get the minutes and convert to fraction of a day self.collage = float(self.options.collage) / 1440.0 @@ -120,7 +114,6 @@ def __init__(self, single=False): self.clients = [] self.finished = [] self.devicegen = None - self.counters = collections.Counter() self.configFilter = None self.configLoaded = False @@ -136,7 +129,7 @@ def __init__(self, single=False): self.log.debug('option "now" specified, starting immediately.') else: # self.startDelay = randint(10, 60) * 60 - self.startDelay = randint(10, 60) * 1 + self.startDelay = randint(10, 60) * 1 # noqa: S311 self.immediate = 0 self.log.info( 'option "now" not specified, waiting %s seconds to start.', @@ -166,14 +159,15 @@ def reportError(self, error): """ self.log.error("Error occured: %s", error) + @defer.inlineCallbacks def connected(self): - """ - Called after connected to the zenhub service - """ + """Invoked after connected to ZenHub.""" reactor.callLater(_CONFIG_PULLING_TIMEOUT, self._checkConfigLoad) - d = self.configure() - d.addCallback(self.heartbeat) - d.addErrback(self.reportError) + try: + yield self.configure() + self.heartbeat() + except Exception: + self.log.exception("failed to configure") def _checkConfigLoad(self): """ @@ -187,6 +181,7 @@ def _checkConfigLoad(self): ) reactor.callLater(_CONFIG_PULLING_TIMEOUT, self._checkConfigLoad) + @defer.inlineCallbacks def configure(self): """ Get our configuration from zenhub @@ -194,51 +189,36 @@ def configure(self): # add in the code to fetch cycle time, etc. self.log.info("Getting configuration from ZenHub...") - def inner(driver): - """ - Generator function to gather our configuration + svc = self.config() - @param driver: driver object - @type driver: driver object - """ - self.log.debug("fetching monitor properties") - yield self.config().callRemote("propertyItems") - items = dict(driver.next()) - # If the cycletime option is not specified or zero, then use the - # modelerCycleInterval value in the database. - if not self.options.cycletime: - self.modelerCycleInterval = items.get( - "modelerCycleInterval", _DEFAULT_CYCLE_INTERVAL - ) - self.configCycleInterval = items.get( - "configCycleInterval", self.configCycleInterval + self.log.debug("fetching monitor properties") + items = yield svc.callRemote("propertyItems") + items = dict(items) + # If the cycletime option is not specified or zero, then use the + # modelerCycleInterval value in the database. 
+ if not self.options.cycletime: + self.modelerCycleInterval = items.get( + "modelerCycleInterval", _DEFAULT_CYCLE_INTERVAL ) - reactor.callLater(self.configCycleInterval * 60, self.configure) - - self.log.debug("Getting threshold classes...") - yield self.config().callRemote("getThresholdClasses") - self.remote_updateThresholdClasses(driver.next()) - - self.log.debug("Getting collector thresholds...") - yield self.config().callRemote("getCollectorThresholds") - thresholds = driver.next() - threshold_notifier = ThresholdNotifier(self.sendEvent, thresholds) + self.configCycleInterval = items.get( + "configCycleInterval", self.configCycleInterval + ) + reactor.callLater(self.configCycleInterval * 60, self.configure) - self.rrdStats.config( - self.name, - self.options.monitor, - self.metricWriter(), - threshold_notifier, - self.derivativeTracker(), - ) + self.log.debug("Getting threshold classes...") + classes = yield svc.callRemote("getThresholdClasses") + self.remote_updateThresholdClasses(classes) - self.log.debug("Getting collector plugins for each DeviceClass") - yield self.config().callRemote("getClassCollectorPlugins") - self.classCollectorPlugins = driver.next() + self.log.debug("Getting collector thresholds...") + thresholds = yield svc.callRemote("getCollectorThresholds") + self.getThresholds().updateList(thresholds) - self.configLoaded = True + self.log.debug("Getting collector plugins for each DeviceClass") + self.classCollectorPlugins = yield svc.callRemote( + "getClassCollectorPlugins" + ) - return drive(inner) + self.configLoaded = True def config(self): """ @@ -355,7 +335,7 @@ def collectDevice(self, device): if USE_WMI: self.wmiCollect(device, ip, timeout) else: - self.log.info( + self.log.debug( "skipping WMI-based collection, PySamba zenpack not installed" ) self.log.info( @@ -381,7 +361,6 @@ def wmiCollect(self, device, ip, timeout): """ if self.options.nowmi: return - client = None try: plugins = self.selectPlugins(device, "wmi") @@ -391,7 +370,7 @@ def wmiCollect(self, device, ip, timeout): if self.checkCollection(device): self.log.info("WMI collector method for device %s", device.id) self.log.info( - "plugins: %s", ", ".join(map(lambda p: p.name(), plugins)) + "plugins: %s", ", ".join(p.name() for p in plugins) ) client = WMIClient(device, self, plugins) if not client or not plugins: @@ -421,9 +400,11 @@ def pythonCollect(self, device, ip, timeout): if self.checkCollection(device): self.log.info("Python collection device %s", device.id) self.log.info( - "plugins: %s", ", ".join(map(lambda p: p.name(), plugins)) + "plugins: %s", ", ".join(p.name() for p in plugins) ) client = PythonClient(device, self, plugins) + else: + self.log.info("no Python collection for device %s", device.id) if not client or not plugins: self.log.warn("Python client creation failed") return @@ -459,6 +440,7 @@ def cmdCollect(self, device, ip, timeout): # don't even create a client if we shouldn't collect/model yet if not self.checkCollection(device): + self.log.info("no cmd collection for device %s", device.id) return if protocol == "ssh": @@ -474,7 +456,7 @@ def cmdCollect(self, device, ip, timeout): ) clientType = "ssh" self.log.info( - "Using SSH collection method for device %s", hostname + "using SSH collection method for device %s", hostname ) elif protocol == "telnet": @@ -491,7 +473,7 @@ def cmdCollect(self, device, ip, timeout): ) clientType = "telnet" self.log.info( - "Using telnet collection method for device %s", hostname + "using telnet collection method for device %s", hostname ) 
else: @@ -516,10 +498,10 @@ def cmdCollect(self, device, ip, timeout): return if not client: - self.log.warn("Shell command collector creation failed") + self.log.warn("shell command collector creation failed") else: self.log.info( - "plugins: %s", ", ".join(map(lambda p: p.name(), plugins)) + "plugins: %s", ", ".join(p.name() for p in plugins) ) except Exception: self.log.exception("Error opening command collector") @@ -544,23 +526,28 @@ def snmpCollect(self, device, ip, timeout): return if not ip: - self.log.info("No manage IP for %s", hostname) + self.log.info("no manage IP for %s", hostname) return plugins = [] plugins = self.selectPlugins(device, "snmp") if not plugins: - self.log.info("No SNMP plugins found for %s", hostname) + self.log.info("no SNMP plugins found for %s", hostname) return if self.checkCollection(device): self.log.info("SNMP collection device %s", hostname) self.log.info( - "plugins: %s", ", ".join(map(lambda p: p.name(), plugins)) + "plugins: %s", ", ".join(p.name() for p in plugins) ) client = SnmpClient( device.id, ip, self.options, device, self, plugins ) + self.log.info( + "SNMP config summary %s", client.connInfo.summary() + ) + else: + self.log.info("no SNMP collection for device %s", hostname) if not client or not plugins: self.log.warn("SNMP collector creation failed") return @@ -601,13 +588,13 @@ def snmpCollect(self, device, ip, timeout): # # return drive(inner) - def addClient(self, device, timeout, clientType, name): + def addClient(self, client, timeout, clientType, name): """ If device is not None, schedule the device to be collected. Otherwise log an error. - @param device: device to collect against - @type device: string + @param client: modelling client + @type client: object @param timeout: timeout before failing the connection @type timeout: integer @param clientType: description of the plugin type @@ -615,11 +602,11 @@ def addClient(self, device, timeout, clientType, name): @param name: plugin name @type name: string """ - if device: - device.timeout = timeout - device.timedOut = False - self.clients.append(device) - device.run() + if client: + client.timeout = timeout + client.timedOut = False + self.clients.append(client) + client.run() else: self.log.warn( "Unable to create a %s collector for %s", clientType, name @@ -649,11 +636,13 @@ def portscanCollect(self, device, ip, timeout): "Portscan collector method for device %s", hostname ) self.log.info( - "plugins: %s", ", ".join(map(lambda p: p.name(), plugins)) + "plugins: %s", ", ".join(p.name() for p in plugins) ) client = PortscanClient( device.id, ip, self.options, device, self, plugins ) + else: + self.log.info("no portscan collection for device %s", hostname) if not client or not plugins: self.log.warn("Portscan collector creation failed") return @@ -690,7 +679,7 @@ def clientFinished(self, collectorClient): @type: Twisted deferred object """ device = collectorClient.device - self.log.debug("Client for %s finished collecting", device.id) + self.log.info("Client for %s finished collecting", device.id) def processClient(driver): try: @@ -906,17 +895,23 @@ def heartbeat(self, ignored=None): # We start modeling from here to accomodate the startup delay. 
if not self.started: - if self.immediate == 0 and self.startat: - # This stuff relies on ARBITRARY_BEAT being < 60s - if self.timeMatches(): - self.started = True - self.log.info("Starting modeling...") - reactor.callLater(1, self.main) + if self.immediate == 0: + if self.startat: + # This stuff relies on ARBITRARY_BEAT being < 60s + if self.timeMatches(): + # Run modeling in case we have now=False, startat is not None and local time matches the startat + self.started = True + self.log.info("Starting modeling...") + reactor.callLater(1, self.main) elif not self.isMainScheduled: + # Or run modeling by cycleTime in case we have now=False, startat is None + # and we haven't set schedule by cycleTime yet self.isMainScheduled = True reactor.callLater(self.cycleTime(), self.main) else: - self.started = True + # Going back to the normal modeling schedule either cron or cycleTime + # after the first immediate modeling during service startup + self.immediate = 0 self.log.info( "Starting modeling in %s seconds.", self.startDelay ) @@ -958,6 +953,7 @@ def _devicegen_has_items(self): self.devicegen = chain([first], self.devicegen) return result + @defer.inlineCallbacks def checkStop(self, unused=None): """ Check to see if there's anything to do. @@ -997,6 +993,12 @@ def checkStop(self, unused=None): if not self.options.cycle: self.stop() self.finished = [] + # frequency of heartbeat rate could be 2 times per minute in + # case we have cron job modeling faster than 1 minute it'll be + # trigger a second time. + if runTime < 60 and self.startat is not None: + yield wait(60) + self.started = False def fillCollectionSlots(self, driver): """ @@ -1041,6 +1043,7 @@ def fillCollectionSlots(self, driver): else: self.log.info("Device %s not returned is it down?", device) except StopIteration: + self.log.info("no more devices") self.devicegen = None finally: self.pendingNewClients = False @@ -1057,8 +1060,6 @@ def timeMatches(self): Check whether the current time matches a cron-like specification, return a straight true or false """ - if self.startat is None: - return True def match_entity(entity, value): if entity == "*": @@ -1265,7 +1266,7 @@ def processOptions(self): if USE_WMI: setNTLMv2Auth(self.options) - configFilter = parseWorkerOptions(self.options.__dict__) + configFilter = parseWorkerOptions(self.options.__dict__, self.log) if configFilter: self.configFilter = configFilter @@ -1350,7 +1351,7 @@ def mainLoop(self, driver): @return: Twisted deferred object @rtype: Twisted deferred object """ - if self.options.cycle: + if self.options.cycle and self.startat is None: self.isMainScheduled = True driveLater(self.cycleTime(), self.mainLoop) diff --git a/Products/Jobber/bin.py b/Products/Jobber/bin.py new file mode 100644 index 0000000000..e6cd5bac6e --- /dev/null +++ b/Products/Jobber/bin.py @@ -0,0 +1,95 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +def main(): + import sys + + import Products.Jobber + + from celery.bin.celery import main + from Products.ZenUtils.Utils import load_config + + # work-around for celery's `--help` bug. 
+ _print_help_when_requested() + + # Dynamic configuration shenanigans because Celery can't be re-configured + # after its initial configuration has been set. + _configure_celery() + + load_config("signals.zcml", Products.Jobber) + + # All calls to celery need the 'app instance' for zenjobs. + sys.argv[1:] = ["-A", "Products.Jobber.zenjobs"] + sys.argv[1:] + + sys.exit(main()) + + +# Note: an empty tuple implies repetition of the key +_import_names = { + "inspect": ("control", "inspect"), + "list": ("list", "list_"), + "report": ("celery", "report"), + "help": ("celery", "help"), +} + + +def _get_command(modname, cmdname): + import importlib + + module = importlib.import_module("celery.bin.{}".format(modname)) + return getattr(module, cmdname) + + +def _print_help_when_requested(): + import sys + from Products.Jobber.zenjobs import app + + if "--help" not in sys.argv: + return + + name = sys.argv[1] + + if name == "--help": + sys.argv[1:] = ["help"] + return + + if name == "monitor": + from Products.Jobber.monitor.command import MonitorCommand + + w = MonitorCommand(app=app) + p = w.create_parser("zenjobs", "monitor") + else: + modname, cmdname = _import_names.get(sys.argv[1], (name, name)) + command = _get_command(modname, cmdname) + cmd = command(app=app) + p = cmd.create_parser(sys.argv[0], name) + + p.print_help() + sys.exit(0) + + +def _configure_celery(): + import argparse + import sys + from Products.Jobber import config + + # If '--help' was passed as an argument, don't attempt configuration. + if "--help" in sys.argv: + return + + parser = argparse.ArgumentParser() + parser.add_argument("--config-file") + + args, remainder = parser.parse_known_args() + if not args.config_file: + return + + cfg = config.getConfig(args.config_file) + config.ZenCeleryConfig = config.from_config(cfg) + sys.argv[1:] = remainder diff --git a/Products/Jobber/config.py b/Products/Jobber/config.py index 80f685daaa..e065962f6d 100644 --- a/Products/Jobber/config.py +++ b/Products/Jobber/config.py @@ -9,13 +9,13 @@ import os +import attr + from Products.ZenUtils.config import Config, ConfigLoader from Products.ZenUtils.GlobalConfig import getGlobalConfiguration -from Products.ZenUtils.RedisUtils import DEFAULT_REDIS_URL, getRedisUrl +from Products.ZenUtils.RedisUtils import DEFAULT_REDIS_URL from Products.ZenUtils.Utils import zenPath -__all__ = ("Celery", "ZenJobs") - _default_configs = { "logpath": "/opt/zenoss/log", @@ -33,7 +33,7 @@ "concurrent-jobs": 1, "job-hard-time-limit": 21600, # 6 hours "job-soft-time-limit": 18000, # 5 hours - + "zenjobs-worker-alive-timeout": 300.0, # 5 minutes "redis-url": DEFAULT_REDIS_URL, } @@ -50,22 +50,35 @@ "zenjobs-job-expires": int, "zodb-max-retries": int, "zodb-retry-interval-limit": int, + "zenjobs-worker-alive-timeout": float, } -def _getConfig(): +_configuration = {} + + +def getConfig(filename=None): """Return a dict containing the configuration for zenjobs.""" - conf = _default_configs.copy() + global _configuration + + configfile_contents = {} + if filename is not None: + if not os.path.exists(filename): + filename = zenPath("etc", filename) + try: + configfile_contents = ConfigLoader([filename], Config)() + except IOError as ex: + # Re-raise exception if the error is not "File not found" + if ex.errno != 2: + raise + + conf = _configuration.setdefault(filename, {}) + if conf: + return conf + + conf.update(_default_configs) conf.update(getGlobalConfiguration()) - - app_conf_file = zenPath("etc", "zenjobs.conf") - app_config_loader = ConfigLoader(app_conf_file, 
Config) - try: - conf.update(app_config_loader()) - except IOError as ex: - # Re-raise exception if the error is not "File not found" - if ex.errno != 2: - raise + conf.update(configfile_contents) # Convert the configuration value types to useable types. for key, cast in _xform.items(): @@ -76,64 +89,84 @@ def _getConfig(): return conf -ZenJobs = _getConfig() - - # Broker settings -def _buildBrokerUrl(): - usr = ZenJobs.get("amqpuser") - pwd = ZenJobs.get("amqppassword") - host = ZenJobs.get("amqphost") - port = ZenJobs.get("amqpport") - vhost = ZenJobs.get("amqpvhost") +def buildBrokerUrl(cfg): + usr = cfg.get("amqpuser") + pwd = cfg.get("amqppassword") + host = cfg.get("amqphost") + port = cfg.get("amqpport") + vhost = cfg.get("amqpvhost") return "amqp://{usr}:{pwd}@{host}:{port}/{vhost}".format(**locals()) -class Celery(object): +@attr.s(slots=True, kw_only=True) +class CeleryConfig(object): """Celery configuration.""" - BROKER_URL = _buildBrokerUrl() - CELERY_ACCEPT_CONTENT = ["without-unicode"] - - # List of modules to import when the Celery worker starts - CELERY_IMPORTS = ("Products.Jobber.jobs",) - - # Result backend (redis) - CELERY_RESULT_BACKEND = ZenJobs.get("redis-url") - CELERY_RESULT_SERIALIZER = "without-unicode" - CELERY_TASK_RESULT_EXPIRES = ZenJobs.get("zenjobs-job-expires") - - # Worker configuration - CELERYD_CONCURRENCY = ZenJobs.get("concurrent-jobs") - CELERYD_PREFETCH_MULTIPLIER = 1 - CELERYD_MAX_TASKS_PER_CHILD = ZenJobs.get("max-jobs-per-worker") - CELERYD_TASK_TIME_LIMIT = ZenJobs.get("job-hard-time-limit") - CELERYD_TASK_SOFT_TIME_LIMIT = ZenJobs.get("job-soft-time-limit") - - # Task settings - CELERY_ACKS_LATE = True - CELERY_IGNORE_RESULT = False - CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True - CELERY_TASK_SERIALIZER = "without-unicode" - CELERY_TRACK_STARTED = True - - # Beat (scheduler) configuration - CELERYBEAT_MAX_LOOP_INTERVAL = ZenJobs.get("scheduler-max-loop-interval") - CELERYBEAT_LOG_FILE = os.path.join( - ZenJobs.get("logpath"), "zenjobs-scheduler.log" + broker_url = attr.ib() + result_backend = attr.ib() + result_expires = attr.ib() + worker_concurrency = attr.ib() + worker_max_tasks_per_child = attr.ib() + task_time_limit = attr.ib() + task_soft_time_limit = attr.ib() + beat_max_loop_interval = attr.ib() + worker_proc_alive_timeout = attr.ib() + + timezone = attr.ib(default=None) + accept_content = attr.ib(default=["without-unicode"]) + imports = attr.ib( + default=[ + "Products.Jobber.jobs", + "Products.ZenCollector.configcache.tasks", + "Products.ZenModel.IpNetwork", # ensure task is registered + "Products.ZenModel.ZDeviceLoader", # ensure task is registered + ] ) - CELERYBEAT_REDIRECT_STDOUTS = True - CELERYBEAT_REDIRECT_STDOUTS_LEVEL = "INFO" - - # Event settings - CELERY_SEND_EVENTS = True - CELERY_SEND_TASK_SENT_EVENT = True - - # Log settings - CELERYD_LOG_COLOR = False - - -# Timezone -_tz = os.environ.get("TZ") -if _tz: - Celery.CELERY_TIMEZONE = _tz + task_routes = attr.ib( + default={ + "configcache.build_device_config": {"queue": "configcache"}, + "configcache.build_oidmap": {"queue": "configcache"}, + } + ) + result_extended = attr.ib(default=True) + result_serializer = attr.ib(default="without-unicode") + worker_prefetch_multiplier = attr.ib(default=1) + task_acks_late = attr.ib(default=True) + task_ignore_result = attr.ib(default=False) + task_store_errors_even_if_ignored = attr.ib(default=True) + task_serializer = attr.ib(default="without-unicode") + task_track_started = attr.ib(default=True) + worker_send_task_events = 
attr.ib(default=True) + task_send_sent_event = attr.ib(default=True) + worker_log_color = attr.ib(default=False) + + # Are these still used? + CELERYBEAT_REDIRECT_STDOUTS = attr.ib(default=True) + CELERYBEAT_REDIRECT_STDOUTS_LEVEL = attr.ib(default="INFO") + + +def from_config(cfg=None): + cfg = cfg if cfg is not None else {} + args = { + "broker_url": buildBrokerUrl(cfg), + "result_backend": cfg.get("redis-url"), + "result_expires": cfg.get("zenjobs-job-expires"), + "worker_concurrency": cfg.get("concurrent-jobs"), + "worker_max_tasks_per_child": cfg.get("max-jobs-per-worker"), + "task_time_limit": cfg.get("job-hard-time-limit"), + "task_soft_time_limit": cfg.get("job-soft-time-limit"), + "beat_max_loop_interval": cfg.get( + "scheduler-max-loop-interval" + ), + "worker_proc_alive_timeout": cfg.get("zenjobs-worker-alive-timeout"), + } + tz = os.environ.get("TZ") + if tz: + args["timezone"] = tz + + return CeleryConfig(**args) + + +# Initialized with default values (for when --config-file is not specified) +ZenCeleryConfig = from_config(getConfig()) diff --git a/Products/Jobber/interfaces.py b/Products/Jobber/interfaces.py index 9881667d31..42dbc366ee 100644 --- a/Products/Jobber/interfaces.py +++ b/Products/Jobber/interfaces.py @@ -7,7 +7,7 @@ # ############################################################################## -from __future__ import absolute_import +from __future__ import absolute_import, unicode_literals from celery import states from zope.interface import Interface @@ -19,60 +19,64 @@ class IJobRecord(Interface): """ """ jobid = TextLine( - title=u"Job ID", - description=u"The Job's unique identifier", + title="Job ID", + description="The Job's unique identifier", ) name = TextLine( - title=u"Name", - description=u"The full class name of the job", + title="Name", + description="The full class name of the job", ) summary = TextLine( - title=u"Summary", - description=u"A brief and general summary of the job's function", + title="Summary", + description="A brief and general summary of the job's function", ) description = TextLine( - title=u"Description", - description=u"A description of what this job will do", + title="Description", + description="A description of what this job will do", ) userid = TextLine( - title=u"User ID", - description=u"The user that created the job", + title="User ID", + description="The user that created the job", ) logfile = TextLine( - title=u"Logfile", - description=u"Path to this job's log file.", + title="Logfile", + description="Path to this job's log file.", ) status = Choice( - title=u"Status", - description=u"The current status of the job", + title="Status", + description="The current status of the job", vocabulary=SimpleVocabulary.fromValues(states.ALL_STATES), ) created = Datetime( - title=u"Created", description=u"When the job was created" + title="Created", + description="When the job was created", ) started = Datetime( - title=u"Started", description=u"When the job began executing" + title="Started", + description="When the job began executing", ) finished = Datetime( - title=u"Finished", description=u"When the job finished executing" + title="Finished", + description="When the job finished executing", ) duration = Timedelta( - title=u"Duration", description=u"How long the job has run" + title="Duration", + description="How long the job has run", ) complete = Bool( - title=u"Complete", - description=u"True if the job has finished running", + title="Complete", + description="True if the job has finished running", ) def abort(): diff --git 
a/Products/Jobber/jobs/facade.py b/Products/Jobber/jobs/facade.py index 4a7ed29877..d5288e5134 100644 --- a/Products/Jobber/jobs/facade.py +++ b/Products/Jobber/jobs/facade.py @@ -11,7 +11,7 @@ import inspect -from celery.utils import fun_takes_kwargs +from ..utils.utils import fun_takes_kwargs from zope.dottedname.resolve import resolve from ..exceptions import FacadeMethodJobFailed @@ -103,3 +103,7 @@ def _run(self, facadefqdn, method, *args, **kwargs): result, ) return result + + +from Products.Jobber.zenjobs import app +app.register_task(FacadeMethodJob) diff --git a/Products/Jobber/jobs/job.py b/Products/Jobber/jobs/job.py index 2d55541e61..93fa8df1b7 100644 --- a/Products/Jobber/jobs/job.py +++ b/Products/Jobber/jobs/job.py @@ -9,7 +9,7 @@ from __future__ import absolute_import -from ..config import ZenJobs +from ..config import getConfig from ..exceptions import NoSuchJobException from ..task import Abortable, DMD, ZenTask from ..zenjobs import app @@ -81,7 +81,7 @@ def setProperties(self, **properties): self.dmd.JobManager.update(jobid, **details) def _get_config(self, key, default=_MARKER): - value = ZenJobs.get(key, default) + value = getConfig().get(key, default) if value is _MARKER: raise KeyError("Config option '{}' is not defined".format(key)) return value diff --git a/Products/Jobber/jobs/misc.py b/Products/Jobber/jobs/misc.py index 3905d56174..61fcb63105 100644 --- a/Products/Jobber/jobs/misc.py +++ b/Products/Jobber/jobs/misc.py @@ -73,6 +73,11 @@ def _run(self, seconds, *args, **kw): raise DelayedFailureError("slept for %s seconds" % seconds) +app.register_task(DeviceListJob) +app.register_task(PausingJob) +app.register_task(DelayedFailure) + + @app.task( bind=True, base=requires(DMD, Abortable), diff --git a/Products/Jobber/jobs/purge_logs.py b/Products/Jobber/jobs/purge_logs.py index 3391d0fe14..b9b30e89c3 100644 --- a/Products/Jobber/jobs/purge_logs.py +++ b/Products/Jobber/jobs/purge_logs.py @@ -11,7 +11,7 @@ import os -from ..config import ZenJobs +from ..config import getConfig from ..task import requires, Abortable from ..zenjobs import app @@ -29,7 +29,7 @@ def purge_logs(self): key.replace(backend.task_keyprefix, "") for key in backend.client.keys("%s*" % backend.task_keyprefix) ) - logpath = ZenJobs.get("job-log-path") + logpath = getConfig().get("job-log-path") logfiles = os.listdir(logpath) if not logfiles: self.log.info("No log files to remove") diff --git a/Products/Jobber/jobs/roles.py b/Products/Jobber/jobs/roles.py index 013d33909d..6a899285e6 100644 --- a/Products/Jobber/jobs/roles.py +++ b/Products/Jobber/jobs/roles.py @@ -37,3 +37,7 @@ def _run(self, organizerUid, *args, **kwargs): self.log.info("About to set local roles for uid: %s ", organizerUid) organizer = self.dmd.unrestrictedTraverse(organizerUid) organizer._setDeviceLocalRoles() + + +from Products.Jobber.zenjobs import app +app.register_task(DeviceSetLocalRolesJob) diff --git a/Products/Jobber/jobs/subprocess.py b/Products/Jobber/jobs/subprocess.py index 9bdbbe4495..0efad8ff65 100644 --- a/Products/Jobber/jobs/subprocess.py +++ b/Products/Jobber/jobs/subprocess.py @@ -130,6 +130,10 @@ def _handle_process(self, process): reader.join(timeout=1.0) +from Products.Jobber.zenjobs import app +app.register_task(SubprocessJob) + + @contextmanager def null_context(): """Do nothing context manager.""" diff --git a/Products/Jobber/log.py b/Products/Jobber/log.py index 28070a41b4..21b64c322a 100644 --- a/Products/Jobber/log.py +++ b/Products/Jobber/log.py @@ -24,7 +24,7 @@ from Products.ZenUtils.Utils 
import zenPath -from .config import ZenJobs +from .config import getConfig from .interfaces import IJobStore from .utils.algorithms import partition from .utils.log import ( @@ -37,111 +37,103 @@ TaskLogFileHandler, ) -_default_log_level = logging.getLevelName(ZenJobs.get("logseverity")) - _default_config = { - "worker": { - "version": 1, - "disable_existing_loggers": False, - "filters": { - "main": { - "()": "Products.Jobber.utils.log.WorkerFilter", - }, - }, - "formatters": { - "main": { - "()": "Products.Jobber.utils.log.TaskFormatter", - "base": ( - "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " - "worker=%(instance)s/%(processName)s: %(message)s" - ), - "task": ( - "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " - "worker=%(instance)s/%(processName)s " - "task=%(taskname)s taskid=%(taskid)s: %(message)s " - ), - "datefmt": "%Y-%m-%d %H:%M:%S", - }, - }, - "handlers": { - "main": { - "formatter": "main", - "class": "cloghandler.ConcurrentRotatingFileHandler", - "filename": os.path.join( - ZenJobs.get("logpath"), "zenjobs.log" - ), - "maxBytes": ZenJobs.get("maxlogsize") * 1024, - "backupCount": ZenJobs.get("maxbackuplogs"), - "mode": "a", - "filters": ["main"], - }, - }, - "loggers": { - "STDOUT": { - "level": _default_log_level, - }, - "zen": { - "level": _default_log_level, - }, - "zen.zenjobs": { - "level": _default_log_level, - "propagate": False, - "handlers": ["main"], - }, - "zen.zenjobs.job": { - "level": _default_log_level, - "propagate": False, - }, - "celery": { - "level": _default_log_level, - }, - }, - "root": { - "handlers": ["main"], + "version": 1, + "disable_existing_loggers": False, + "filters": { + "main": { + "()": "Products.Jobber.utils.log.WorkerFilter", }, }, - "beat": { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "beat": { - "format": ( - "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " - "%(message)s" - ), - "datefmt": "%Y-%m-%d %H:%M:%S", - }, - }, - "handlers": { - "beat": { - "formatter": "beat", - "class": "cloghandler.ConcurrentRotatingFileHandler", - "filename": os.path.join( - ZenJobs.get("logpath"), "zenjobs-scheduler.log" - ), - "maxBytes": ZenJobs.get("maxlogsize") * 1024, - "backupCount": ZenJobs.get("maxbackuplogs"), - "mode": "a", - }, + "formatters": { + "main": { + "()": "Products.Jobber.utils.log.TaskFormatter", + "base": ( + "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " + "worker=%(instance)s/%(processName)s: %(message)s" + ), + "task": ( + "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " + "worker=%(instance)s/%(processName)s " + "task=%(taskname)s taskid=%(taskid)s: %(message)s " + ), + "datefmt": "%Y-%m-%d %H:%M:%S", }, - "loggers": { - "STDOUT": { - "level": _default_log_level, - }, - "zen": { - "level": _default_log_level, - }, - "celery": { - "level": _default_log_level, - }, - }, - "root": { - "handlers": ["beat"], + "beat": { + "format": ( + "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " + "%(message)s" + ), + "datefmt": "%Y-%m-%d %H:%M:%S", }, }, + "handlers": {}, + "loggers": { + "STDOUT": {}, + "zen": {}, + "celery": {}, + }, + "root": { + "handlers": [], + }, +} + +_main_loggers = { + "zen.zenjobs": { + "propagate": False, + "handlers": ["main"], + }, + "zen.zenjobs.job": { + "propagate": False, + }, +} +_configcache_loggers = {} + +_main_handler = { + "formatter": "main", + "class": "cloghandler.ConcurrentRotatingFileHandler", + "filename": None, + "mode": "a", + "filters": ["main"], } -_loglevelconf_filepath = zenPath("etc", "zenjobs_log_levels.conf") +_beat_handler = { + 
"formatter": "beat", + "class": "cloghandler.ConcurrentRotatingFileHandler", + "filename": None, + "mode": "a", +} + + +def _get_handler(handler): + cfg = dict(handler) + cfg.update( + { + "maxBytes": getConfig().get("maxlogsize") * 1024, + "backupCount": getConfig().get("maxbackuplogs"), + } + ) + return cfg + + +def _get_filenames(cfg): + logpath = cfg.get("logpath") + return { + "zenjobs": os.path.join(logpath, "zenjobs.log"), + "beat": os.path.join(logpath, "zenjobs-scheduler.log"), + "configcache_builder": os.path.join( + logpath, "configcache-builder.log" + ), + } + + +_loglevel_confs = { + "zenjobs": zenPath("etc", "zenjobs_log_levels.conf"), + "beat": zenPath("etc", "zenjobs_log_levels.conf"), + "configcache_builder": zenPath( + "etc", "configcache_builder_log_levels.conf" + ), +} def _get_logger(name=None): @@ -152,23 +144,37 @@ def _get_logger(name=None): return get_logger(name) -def get_default_config(name): - """Return the default logging configuration for the given name. - - :rtype: dict - """ - return _default_config[name] - - -def configure_logging(logfile=None, **kw): +def configure_logging(logfile, **kw): """Configure logging for zenjobs.""" - # Sketchy hack. Determine the logging config based on whether - # the logfile parameter is not None. - config_name = "worker" if logfile is None else "beat" - logging.config.dictConfig(get_default_config(config_name)) - - if os.path.exists(_loglevelconf_filepath): - levelconfig = load_log_level_config(_loglevelconf_filepath) + cfg = getConfig() + default_log_level = logging.getLevelName(cfg.get("logseverity")) + filenames = _get_filenames(cfg) + + _default_config["loggers"]["STDOUT"]["level"] = default_log_level + _default_config["loggers"]["zen"]["level"] = default_log_level + _default_config["loggers"]["celery"]["level"] = default_log_level + + # NOTE: Cleverly used the `-f` command line argument to specify + # which logging configuration to use. + if logfile in ("zenjobs", "configcache_builder"): + _main_loggers["zen.zenjobs"]["level"] = default_log_level + _main_loggers["zen.zenjobs.job"]["level"] = default_log_level + _default_config["loggers"].update(**_main_loggers) + _default_config["root"]["handlers"].append("main") + handler = _get_handler(_main_handler) + handler["filename"] = filenames[logfile] + _default_config["handlers"]["main"] = handler + elif logfile == "beat": + _default_config["root"]["handlers"].append("beat") + handler = _get_handler(_beat_handler) + handler["filename"] = filenames[logfile] + _default_config["handlers"]["beat"] = handler + + logging.config.dictConfig(_default_config) + + loglevelconf_filename = _loglevel_confs[logfile] + if os.path.exists(loglevelconf_filename): + levelconfig = load_log_level_config(loglevelconf_filename) apply_levels(levelconfig) stdout_logger = logging.getLogger("STDOUT") @@ -181,7 +187,7 @@ def configure_logging(logfile=None, **kw): sys.__stderr__ = errproxy sys.stderr = errproxy - if config_name == "beat": + if logfile == "beat": # The celery.beat module has a novel approach to getting its # logger, so fixing things so log messages can get sent where # we see them. 
@@ -356,8 +362,12 @@ class LogLevelUpdater(object): @classmethod def start(cls): + logfilename = logging._handlers.get("main").baseFilename + name = ( + logfilename.rsplit("/", 1)[-1].rsplit(".", 1)[0].replace("-", "_") + ) if cls.instance is None: - cls.instance = _LogLevelUpdaterThread(_loglevelconf_filepath) + cls.instance = _LogLevelUpdaterThread(_loglevel_confs[name]) cls.instance.start() elif not cls.instance.is_alive(): cls.instance = None @@ -409,7 +419,7 @@ def run(self): def _get_hash(config): - return hashlib.md5( + return hashlib.sha256( "".join("{0}{1}".format(k, config[k]) for k in sorted(config)) ).hexdigest() diff --git a/Products/Jobber/manager.py b/Products/Jobber/manager.py index 64f17deff7..4daca3d0ac 100644 --- a/Products/Jobber/manager.py +++ b/Products/Jobber/manager.py @@ -515,8 +515,12 @@ def _getByStatusAndType(statuses, jobtype=None): def _getJobTypeStr(jobtype): if isinstance(jobtype, type): - return jobtype.name - task = app.tasks.get(str(jobtype)) - if not task: - raise ValueError("No such job: {!r}".format(jobtype)) - return task.name + name = jobtype.name + else: + task = app.tasks.get(str(jobtype)) + if not task: + raise ValueError("No such task: {!r}".format(jobtype)) + name = task.name + if name is None: + raise ValueError("zenjobs task name is None: {!r}".format(jobtype)) + return name diff --git a/Products/Jobber/meta.py b/Products/Jobber/meta.py new file mode 100644 index 0000000000..f5e1b71866 --- /dev/null +++ b/Products/Jobber/meta.py @@ -0,0 +1,73 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2012-2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function, unicode_literals + +from celery import signals +from zope.configuration.exceptions import ConfigurationError +from zope.configuration.fields import GlobalObject +from zope.interface import Interface +from zope.schema import TextLine + + +class IJob(Interface): + """Registers a ZenJobs task.""" + + name = TextLine(title="Name", description="Unused", required=False) + + task = GlobalObject( + title="ZenJobs Task", + description="Path to a task class or function", + required=False, + ) + + class_ = task # old name for backward compatibility + + +def job(_context, **kw): + """Register the task with Celery, if necessary.""" + from Products.Jobber.zenjobs import app + + task = kw.get("task") + if task is None: + task = kw.get("class_") + if task is None: + raise ConfigurationError( + ("Missing parameter:", "'task' or 'class'") + ) + + if not task.name or task.name not in app.tasks: + try: + registered_task = app.register_task(task) + registered_task.__class__.name = registered_task.name + except Exception as e: + raise Exception("Task registration failed: %s" % e) + + +class ICelerySignal(Interface): + """Registers a Celery signal handler.""" + + name = TextLine( + title="Name", + description="The signal receiving a handler", + ) + + handler = TextLine( + title="Handler", + description="Classpath to the function handling the signal", + ) + + +def signal(_context, name, handler): + """Register a Celery signal handler.""" + signal = getattr(signals, name, None) + if signal is None: + raise AttributeError("Unknown signal name '%s'" % name) + handler_fn = _context.resolve(handler) + signal.connect(handler_fn) diff --git a/Products/Jobber/meta.zcml b/Products/Jobber/meta.zcml index 0e19b2b00f..e9e5dd1a12 100644 --- a/Products/Jobber/meta.zcml +++ b/Products/Jobber/meta.zcml @@ -1,19 +1,20 @@ + diff --git a/Products/Jobber/metaconfigure.py b/Products/Jobber/metaconfigure.py deleted file mode 100644 index 81670e2227..0000000000 --- a/Products/Jobber/metaconfigure.py +++ /dev/null @@ -1,26 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2012-2019 all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -from __future__ import absolute_import, print_function - -from celery import signals - - -def job(_context, class_, name=None): - """Hold place for unused job directive.""" - pass - - -def signal(_context, name, handler): - """Register a Celery signal handler.""" - signal = getattr(signals, name, None) - if signal is None: - raise AttributeError("Unknown signal name '%s'" % name) - handler_fn = _context.resolve(handler) - signal.connect(handler_fn) diff --git a/Products/Jobber/metadirectives.py b/Products/Jobber/metadirectives.py deleted file mode 100644 index 16b6b93908..0000000000 --- a/Products/Jobber/metadirectives.py +++ /dev/null @@ -1,43 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2012-2019 all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. 
-# -############################################################################## - -from __future__ import absolute_import, unicode_literals - -from zope.configuration.fields import GlobalObject -from zope.interface import Interface -from zope.schema import TextLine - - -class IJob(Interface): - """Registers a ZenJobs Job class.""" - - class_ = GlobalObject( - title=u"Job Class", - description=u"The class of the job to register", - ) - - name = TextLine( - title=u"Name", - description=u"Optional name of the job", - required=False, - ) - - -class ICelerySignal(Interface): - """Registers a Celery signal handler.""" - - name = TextLine( - title=u"Name", - description=u"The signal receiving a handler", - ) - - handler = TextLine( - title=u"Handler", - description=u"Classpath to the function handling the signal", - ) diff --git a/Products/Jobber/model.py b/Products/Jobber/model.py index 6318c30d00..b04caebda7 100644 --- a/Products/Jobber/model.py +++ b/Products/Jobber/model.py @@ -22,14 +22,14 @@ from Products.Zuul.interfaces import IMarshaller, IInfo -from .config import ZenJobs +from .config import getConfig from .interfaces import IJobStore, IJobRecord from .storage import Fields from .task.utils import job_log_has_errors +from .utils.app import get_app from .utils.log import inject_logger -from .zenjobs import app -mlog = logging.getLogger("zen.zenjobs.model") +_mlog = logging.getLogger("zen.zenjobs.model") sortable_keys = list(set(Fields) - {"details"}) @@ -121,7 +121,7 @@ def job_name(self): @property def job_type(self): - task = app.tasks.get(self.name) + task = get_app().tasks.get(self.name) if task is None: return self.name if self.name else "" try: @@ -153,7 +153,7 @@ def wait(self): @property def result(self): - return app.tasks[self.name].AsyncResult(self.jobid) + return get_app().tasks[self.name].AsyncResult(self.jobid) def __eq__(self, other): if not isinstance(other, type(self)): @@ -249,7 +249,7 @@ def from_task(cls, task, jobid, args, kwargs, **fields): summary=task.summary, description=description, logfile=os.path.join( - ZenJobs.get("job-log-path"), "%s.log" % jobid + getConfig().get("job-log-path"), "%s.log" % jobid ), ) if "status" in fields: @@ -280,15 +280,14 @@ def from_signal(cls, body, headers, properties): """Return a RedisRecord object built from the arguments passed to a before_task_publish signal handler. """ - jobid = body.get("id") - taskname = body.get("task") - args = body.get("args", ()) - kwargs = body.get("kwargs", {}) + jobid = headers.get("id") + taskname = headers.get("task") + args, kwargs, _ = body return cls._build(jobid, taskname, args, kwargs, headers, properties) @classmethod def _build(cls, jobid, taskname, args, kwargs, headers, properties): - task = app.tasks[taskname] + task = get_app().tasks[taskname] fields = {} description = properties.pop("description", None) if description: @@ -301,7 +300,7 @@ def _build(cls, jobid, taskname, args, kwargs, headers, properties): return cls.from_task(task, jobid, args, kwargs, **fields) -@inject_logger(log=mlog) +@inject_logger(log=_mlog) def save_jobrecord(log, body=None, headers=None, properties=None, **ignored): """Save the Zenoss specific job metadata to redis. @@ -313,19 +312,26 @@ def save_jobrecord(log, body=None, headers=None, properties=None, **ignored): :param dict headers: Headers to accompany message sent to Celery worker :param dict properties: Additional task and custom key/value pairs """ + if headers is None: + # If headers is None, bad signal so ignore. 
+ log.info("no headers, bad signal?") + return + if not body: # If body is empty (or None), no job to save. log.info("no body, so no job") return - if headers is None: - # If headers is None, bad signal so ignore. - log.info("no headers, bad signal?") + if not isinstance(body, tuple): + # body is not in protocol V2 format + log.warning("task data not in protocol V2 format") return - task = app.tasks.get(body.get("task")) + taskname = headers.get("task") + task = get_app().tasks.get(taskname) + if task is None: - log.warn("Ignoring unknown task: %s", body.get("task")) + log.warn("Ignoring unknown task: %s", taskname) return # If the result of tasks is ignored, don't create a job record. @@ -350,8 +356,10 @@ def save_jobrecord(log, body=None, headers=None, properties=None, **ignored): if not saved: return + _, _, canvas = body + # Iterate over the callbacks. - callbacks = body.get("callbacks") or [] + callbacks = canvas.get("callbacks") or [] links = [] for cb in callbacks: links.extend(cb.flatten_links()) @@ -380,14 +388,14 @@ def _save_record(log, storage, record): return False -@inject_logger(log=mlog) +@inject_logger(log=_mlog) def stage_jobrecord(log, storage, sig): """Save Zenoss job data to redis with status "STAGED". :param sig: The job data :type sig: celery.canvas.Signature """ - task = app.tasks.get(sig.task) + task = get_app().tasks.get(sig.task) # Tasks with ignored results cannot be tracked, # so don't insert a record into Redis. @@ -405,14 +413,14 @@ def stage_jobrecord(log, storage, sig): _save_record(log, storage, record) -@inject_logger(log=mlog) +@inject_logger(log=_mlog) def commit_jobrecord(log, storage, sig): """Update STAGED job records to PENDING. :param sig: The job data :type sig: celery.canvas.Signature """ - task = app.tasks.get(sig.task) + task = get_app().tasks.get(sig.task) # Tasks with ignored results cannot be tracked, # so there won't be a record to update. 
@@ -441,7 +449,7 @@ def wrapper(log, *args, **kw): return wrapper -@inject_logger(log=mlog) +@inject_logger(log=_mlog) @_catch_exception def job_start(log, task_id, task=None, **ignored): if task is not None and task.ignore_result: @@ -465,7 +473,7 @@ def job_start(log, task_id, task=None, **ignored): log.info("status=%s started=%s", status, tm) -@inject_logger(log=mlog) +@inject_logger(log=_mlog) @_catch_exception def job_end(log, task_id, task=None, **ignored): if task is not None and task.ignore_result: @@ -490,14 +498,14 @@ def job_end(log, task_id, task=None, **ignored): log.info("Job total duration is %0.3f seconds", finished - started) -@inject_logger(log=mlog) +@inject_logger(log=_mlog) @_catch_exception def job_success(log, result, sender=None, **ignored): if sender is not None and sender.ignore_result: return task_id = sender.request.id jobstore = getUtility(IJobStore, "redis") - status = app.backend.get_status(task_id) + status = get_app().backend.get_status(task_id) if job_log_has_errors(task_id): log.warn("Error messages detected in job log.") status = states.FAILURE @@ -506,12 +514,12 @@ def job_success(log, result, sender=None, **ignored): log.info("status=%s finished=%s", status, tm) -@inject_logger(log=mlog) +@inject_logger(log=_mlog) @_catch_exception def job_failure(log, task_id, exception=None, sender=None, **ignored): if sender is not None and sender.ignore_result: return - status = app.backend.get_status(task_id) + status = get_app().backend.get_status(task_id) jobstore = getUtility(IJobStore, "redis") if task_id not in jobstore: @@ -536,13 +544,13 @@ def job_failure(log, task_id, exception=None, sender=None, **ignored): jobstore.update(cbid, status=ABORTED, finished=tm) -@inject_logger(log=mlog) +@inject_logger(log=_mlog) @_catch_exception def job_retry(log, request, reason=None, sender=None, **ignored): if sender is not None and sender.ignore_result: return jobstore = getUtility(IJobStore, "redis") task_id = request.id - status = app.backend.get_status(task_id) + status = get_app().backend.get_status(task_id) jobstore.update(task_id, status=status) log.info("status=%s", status) diff --git a/Products/Jobber/monitor.py b/Products/Jobber/monitor.py deleted file mode 100644 index ae17b1b4cd..0000000000 --- a/Products/Jobber/monitor.py +++ /dev/null @@ -1,137 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2019, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -from __future__ import absolute_import, print_function - -# import ast - -from collections import defaultdict -from datetime import timedelta - -from celery.bin.base import Command -from zope.component import getUtility - -from .interfaces import IJobStore -from .utils.datetime import humanize_timedelta - - -def catch_error(f): - # Decorator that catches and prints the exception thrown from the - # decorated function. 
- def call_func(*args, **kw): - try: - return f(*args, **kw) - except Exception as ex: - print(ex) - - return call_func - - -class ZenJobsMonitor(Command): - """Monitor Celery events.""" - - @catch_error - def task_failed(self, event): - self.state.event(event) - jobid = event["uuid"] - instance = self.state.tasks.get(jobid) - job = self.app.tasks.get(instance.name) - result = job.AsyncResult(jobid) - classkey, summary = _getErrorInfo(self.app, result.result) - # args = ast.literal_eval(instance.args) - # kwargs = ast.literal_eval(instance.kwargs) - name = job.getJobType() if hasattr(job, "getJobType") else job.name - print( - "Job failed worker=%s jobid=%s name=%s" - % (event["hostname"], jobid, name) - ) - - def run(self, **kw): - self.state = self.app.events.State( - on_node_join=on_node_join, - on_node_leave=on_node_leave, - ) - self.seconds_since = defaultdict(float) - self.storage = getUtility(IJobStore, "redis") - - conn = self.app.connection().clone() - - def _error_handler(exc, interval): - print("Internal error: %s" % (exc,)) - - while True: - print("Begin monitoring for zenjobs/celery events") - try: - conn.ensure_connection(_error_handler) - recv = self.app.events.Receiver( - conn, - handlers={ - "task-failed": self.task_failed, - "*": self.state.event, - }, - ) - recv.capture(wakeup=True) - except (KeyboardInterrupt, SystemExit): - return conn and conn.close() - except conn.connection_errors + conn.channel_errors: - print("Connection lost, attempting reconnect") - - -def _getTimeoutSummary(app, ex): - return "Job killed after {}.".format( - humanize_timedelta( - timedelta( - seconds=app.conf.get("CELERYD_TASK_SOFT_TIME_LIMIT"), - ), - ), - ) - - -def _getAbortedSummary(app, ex): - return "Job aborted by user" - - -def _getErrorSummary(app, ex): - return "{0.__class__.__name__}: {0}".format(ex) - - -_error_eventkey_map = { - "TaskAborted": ("zenjobs-aborted", _getAbortedSummary), - "SoftTimeLimitExceeded": ("zenjobs-timeout", _getTimeoutSummary), -} - - -def _getErrorInfo(app, ex): - """Returns (eventkey, summary).""" - key, summary_fn = _error_eventkey_map.get( - type(ex).__name__, ("zenjobs-failure", _getErrorSummary) - ) - return key, summary_fn(app, ex) - - -def on_node_join(*args, **kw): - worker = args[0] - print( - "Worker node added to monitor worker=%s uptime=%s" - % ( - worker.hostname, - humanize_timedelta(timedelta(seconds=worker.clock)), - ), - ) - - -def on_node_leave(*args, **kw): - worker = args[0] - print( - "Worker node left monitor worker=%s uptime=%s" - % ( - worker.hostname, - humanize_timedelta(timedelta(seconds=worker.clock)), - ), - ) diff --git a/bin/zeneventmigrate b/Products/Jobber/monitor/__init__.py old mode 100755 new mode 100644 similarity index 62% rename from bin/zeneventmigrate rename to Products/Jobber/monitor/__init__.py index 21eb6b0652..dd3ac4a06c --- a/bin/zeneventmigrate +++ b/Products/Jobber/monitor/__init__.py @@ -1,13 +1,12 @@ -#! /usr/bin/env bash ############################################################################## -# -# Copyright (C) Zenoss, Inc. 2011, all rights reserved. -# +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## +from .command import MonitorCommand -. 
$ZENHOME/bin/zenfunctions -$PYTHON $ZENHOME/Products/ZenEvents/zeneventmigrate.py "$CMD" "$@" +__all__ = ("MonitorCommand",) diff --git a/Products/Jobber/monitor/broker.py b/Products/Jobber/monitor/broker.py new file mode 100644 index 0000000000..bc15309646 --- /dev/null +++ b/Products/Jobber/monitor/broker.py @@ -0,0 +1,76 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import requests + +from six.moves.urllib.parse import urlparse, urljoin, quote_plus, unquote + +from .logger import getLogger + + +class Broker(object): + def __new__(cls, broker_url): + scheme = urlparse(broker_url).scheme + if scheme == "amqp": + return RabbitMQ(broker_url) + + +class RabbitMQ(object): + """Just enough API to satisfy collecting metrics from the broker.""" + + def __init__(self, broker_url): + parsed = urlparse(broker_url) + self._host = parsed.hostname + self._port = 15672 + self._vhost = quote_plus(parsed.path[1:]) + username = parsed.username + password = parsed.password + self._username = unquote(username) if username else username + self._password = unquote(password) if password else password + self._http_api = ( + "http://{username}:{password}@{host}:{port}/api/" + ).format( + username=self._username, + password=self._password, + host=self._host, + port=self._port, + ) + self._log = getLogger(self) + + def queues(self, names): + if not names: + return () + attempts = 1 + timeout = 1.0 + url = urljoin(self._http_api, "queues/" + self._vhost) + params = {"columns": ",".join(["name", "messages"])} + while True: + try: + r = requests.get(url, params=params, timeout=timeout) + except requests.Timeout: + if attempts < 3: + attempts += 1 + timeout *= 2 + else: + self._log.warning( + "timed out requesting data from RabbitMQ" + ) + return () + except Exception: + self._log.exception( + "unexpected error while requesting data from RabbitMQ" + ) + else: + break + + if r.status_code != 200: + r.raise_for_status() + return tuple(q for q in r.json() if q["name"] in names) diff --git a/Products/Jobber/monitor/collector.py b/Products/Jobber/monitor/collector.py new file mode 100644 index 0000000000..fde6df5f61 --- /dev/null +++ b/Products/Jobber/monitor/collector.py @@ -0,0 +1,183 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+#
+##############################################################################
+
+from __future__ import absolute_import, print_function
+
+import logging
+import math
+import threading
+import time
+
+from itertools import chain
+
+from Products.ZenUtils.controlplane import configuration as cc_config
+
+from .logger import getLogger
+
+# from itertools import izip_longest
+
+# Metrics
+# -------
+# celery.<service>.pending.count - Count of queued tasks
+# celery.<service>.running.count - Count of running tasks
+# celery.<service>.cycletime.mean - Average runtime of tasks
+# celery.<service>.leadtime.mean - Average lifetime of tasks
+# celery.<service>.success.percent - Percentage of successful runs
+# celery.<service>.failure.percent - Percentage of failed runs
+# celery.<service>.retry.percent - Percentage of retried runs
+#
+# Where <service> is "zenjobs" or "builder" and <name> is the
+# lower-cased name of the job.
+
+
+class MetricsCollector(threading.Thread):
+    def __init__(self, broker, inspector, reporter, metrics, interval=60):
+        super(MetricsCollector, self).__init__()
+        self._broker = broker
+        self._inspector = inspector
+        self._metrics = metrics
+        self._reporter = reporter
+        self._interval = interval
+        self._stopEvent = threading.Event()
+        self._log = getLogger(self)
+
+    def stop(self):
+        self._stopEvent.set()
+
+    def run(self):
+        while not self._stopEvent.is_set():
+            self._stopEvent.wait(self._interval)
+            if not self._stopEvent.is_set():
+                try:
+                    self.task()
+                except Exception:
+                    self._log.exception("error while collecting metrics")
+
+    def task(self):
+        self._log.debug("begin metric collection")
+        try:
+            running_counts = self._inspector.running_counts()
+            if not running_counts:
+                self._log.warning("count of running tasks not collected")
+            services = self._inspector.workers()
+            if not services:
+                self._log.warning("no information about workers")
+            queues = {
+                str(queue["name"]): queue["messages"]
+                for queue in self._broker.queues(
+                    [info["queue"] for info in services.values()]
+                )
+            }
+            if not queues:
+                self._log.warning("no information about queues")
+            report = self._metrics.report()
+
+            mgen = _MetricGenerator(services, running_counts, queues, report)
+
+            common_tags = {
+                "serviceId": cc_config.service_id,
+                "tenantId": cc_config.tenant_id,
+            }
+            with self._reporter.session(tags=common_tags) as session:
+                for metric in mgen():
+                    session.add(**metric)
+
+            if self._log.getEffectiveLevel() == logging.DEBUG:
+                for metric in session.metrics:
+                    self._log.debug(metric)
+        finally:
+            self._log.debug("finished metric collection")
+
+
+class _MetricGenerator(object):
+    def __init__(self, services, running_counts, queues, report):
+        self._now = time.time()
+        self._running_counts = running_counts
+        self._services = services
+        self._serviceids = {
+            str(name): str(info["serviceid"])
+            for name, info in services.iteritems()
+        }
+        self._queues = queues
+        self._report = report
+
+    def __call__(self):
+        return chain(self._counts(), self._percents(), self._timings())
+
+    def _counts(self):
+        for service, info in self._services.iteritems():
+            pending_count = self._queues.get(info["queue"])
+            if pending_count is not None:
+                yield (
+                    {
+                        "metric": "celery.{}.pending.count".format(service),
+                        "value": pending_count,
+                        "timestamp": self._now,
+                    }
+                )
+            running_count = self._running_counts.get(service)
+            if running_count is not None:
+                yield (
+                    {
+                        "metric": "celery.{}.running.count".format(service),
+                        "value": running_count,
+                        "timestamp": self._now,
+                    }
+                )
+
+    def _percents(self):
+        results = self._report.get("results")
+        for service, result in
results.iteritems(): + success = result["success_percent"] + failure = result["failure_percent"] + retry = result["retry_percent"] + if not math.isnan(success): + yield ( + { + "metric": "celery.{}.success.percent".format(service), + "value": success, + "timestamp": self._now, + } + ) + if not math.isnan(failure): + yield ( + { + "metric": "celery.{}.failure.percent".format(service), + "value": failure, + "timestamp": self._now, + } + ) + if not math.isnan(retry): + yield ( + { + "metric": "celery.{}.retry.percent".format(service), + "value": retry, + "timestamp": self._now, + } + ) + + def _timings(self): + cycletime_services = self._report["cycletime"]["services"] + leadtime_services = self._report["leadtime"]["services"] + for service, cycletimes in cycletime_services.iteritems(): + yield ( + { + "metric": "celery.{}.cycletime.mean".format(service), + "value": cycletimes["mean"], + "timestamp": self._now, + } + ) + leadtimes = leadtime_services.get(service) + yield ( + { + "metric": "celery.{}.leadtime.mean".format(service), + "value": leadtimes["mean"], + "timestamp": self._now, + } + ) diff --git a/Products/Jobber/monitor/command.py b/Products/Jobber/monitor/command.py new file mode 100644 index 0000000000..3e1c27fbb3 --- /dev/null +++ b/Products/Jobber/monitor/command.py @@ -0,0 +1,121 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import Queue +import signal + +from celery.bin.base import Command + +from Products.ZenCollector.configcache.app.args import ZenHelpFormatter +from Products.ZenUtils.config import ConfigLoader +from Products.ZenUtils.Utils import zenPath + +from .broker import Broker +from .collector import MetricsCollector +from .events import EventsMonitor +from .handler import EventsHandler +from .inspector import Inspector +from .logger import configure_logging, getLogger +from .metrics import ZenJobsMetrics +from .reporter import MetricsReporter + + +class MonitorCommand(Command): + # Override create_parser to get a different formatter class. + # @override + def create_parser(self, prog_name, command=None): + # for compatibility with optparse usage. + usage = self.usage(command).replace("%prog", "%(prog)s") + parser = self.Parser( + prog=prog_name, + usage=usage, + epilog=self._format_epilog(self.epilog), + formatter_class=ZenHelpFormatter, + description=self._format_description(self.description), + ) + self._add_version_argument(parser) + self.add_preload_arguments(parser) + self.add_arguments(parser) + self.add_compat_options(parser, self.get_options()) + self.add_compat_options(parser, self.app.user_options["preload"]) + + if self.supports_args: + # for backward compatibility with optparse, we automatically + # add arbitrary positional args. 
+ parser.add_argument(self.args_name, nargs="*") + return self.prepare_parser(parser) + + # @override + def add_arguments(self, parser): + parser.add_argument( + "--conf-file", + default=zenPath("etc", "zenjobs-monitor.conf"), + help="Pathname of configuration file", + ) + + # @override + def run(self, *args, **options): + conf_file = options["conf_file"] + config = ConfigLoader(conf_file)() + metric_interval = config.getint("metric-interval") + log_filename = config.get("log-filename") + log_level = config.get("log-level") + log_max_file_count = config.getint("log-max-file-count") + log_max_file_size = config.getint("log-max-file-size") * 1024 + configure_logging( + level=log_level, + filename=log_filename, + maxcount=log_max_file_count, + maxsize=log_max_file_size, + ) + log = getLogger(self) + try: + eventqueue = Queue.Queue() + reporter = MetricsReporter() + metrics = ZenJobsMetrics() + broker_url = self.app.connection().as_uri(include_password=True) + + broker = Broker(broker_url) + inspector = Inspector(self.app) + + handler = EventsHandler(eventqueue, metrics, self.app) + monitor = EventsMonitor(eventqueue, self.app) + collector = MetricsCollector( + broker, inspector, reporter, metrics, metric_interval + ) + + handler.start() + monitor.start() + collector.start() + + state = {"shutdown": False} + + def _handle_signal(state, signum, frame): + state["shutdown"] = True + + signal.signal( + signal.SIGTERM, lambda sn, fr: _handle_signal(state, sn, fr) + ) + + while True: + try: + signal.pause() + if state["shutdown"]: + break + except (KeyboardInterrupt, SystemExit): + break + except Exception: + log.exception("unexpected error") + finally: + collector.stop() + handler.stop() + handler.join() + collector.join(timeout=1.0) diff --git a/Products/Jobber/monitor/events.py b/Products/Jobber/monitor/events.py new file mode 100644 index 0000000000..89ef656f16 --- /dev/null +++ b/Products/Jobber/monitor/events.py @@ -0,0 +1,53 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import threading +import time + +from celery.events import EventReceiver + +from .logger import getLogger + + +class EventsMonitor(threading.Thread): + + daemon = True # doesn't block shutdown + + def __init__(self, sink, app): + """Initialize an EventsMonitor instance. + + @param sink: Events are written to this object. 
+ @type sink: Queue.Queue + @param app: The Celery application + @type app: celery.Celery + """ + super(EventsMonitor, self).__init__() + self._sink = sink + self._app = app + self._log = getLogger(self) + + def run(self): + try_interval = 1 + while True: + try: + try_interval *= 2 + with self._app.connection() as conn: + recv = EventReceiver( + conn, handlers={"*": self._put}, app=self._app + ) + try_interval = 1 + recv.capture(limit=None, timeout=None, wakeup=True) + except Exception: + self._log.exception("unexpected error") + time.sleep(try_interval) + + def _put(self, event): + self._sink.put(event) diff --git a/Products/Jobber/monitor/handler.py b/Products/Jobber/monitor/handler.py new file mode 100644 index 0000000000..73612e39c8 --- /dev/null +++ b/Products/Jobber/monitor/handler.py @@ -0,0 +1,145 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import Queue +import threading + +from collections import defaultdict + +from celery.events.state import State + +from .logger import getLogger + + +class EventsHandler(threading.Thread): + def __init__(self, source, metrics, app): + """Initialize an EventsHandler instance. + + @param source: Events are read from this object. + @type source: Queue.Queue + @param metrics: + @type metrics: ZenJobsMetrics + @param app: The Celery application + @type app: celery.Celery + """ + super(EventsHandler, self).__init__() + self._source = source + self._metrics = metrics + self._app = app + self._stopEvent = threading.Event() + self._queue_svc_map = {} + self._handlers = { + "worker-online": self._online, + "worker-offline": self._offline, + "task-sent": self._sent, + "task-succeeded": self._succeeded, + "task-retried": self._retried, + "task-failed": self._failed, + } + self._heartbeats = defaultdict(int) + self._log = getLogger(self) + + def run(self): + self._log.info("started handling celery events") + state = State() + while not self._stopEvent.is_set(): + try: + event = self._source.get(True, 0.5) + state.event(event) + + event_type = event["type"] + + handler = self._handlers.get(event_type) + if not handler: + continue + + if event_type.startswith("task-"): + task_id = event["uuid"] + arg = state.tasks.get(task_id) + else: + arg = state.workers.get(event["hostname"]) + + try: + handler(arg) + except Exception: + self._log.exception("event handler failed: %r", handler) + except Queue.Empty: + pass + except Exception: + self._log.exception("unexpected error") + self._log.info("stopped handling celery events") + + def stop(self): + self._stopEvent.set() + + def _get_svc_from_node(self, node): + return node.split("@")[0].split("-")[0] + + def _online(self, worker): + self._log.info("worker online worker=%s", worker.hostname) + + def _offline(self, worker): + self._log.warning("worker offline worker=%s", worker.hostname) + + def _build_queue_svc_mapping(self): + inspect = self._app.control.inspect() + active_queues = inspect.active_queues() + for node, queues in active_queues.items(): + svcname = self._get_svc_from_node(node) + qname = queues[0]["name"] + if qname not in self._queue_svc_map: + self._queue_svc_map[qname] = svcname + + def _get_svc_from_queue(self, qname): 
+ if qname not in self._queue_svc_map: + self._build_queue_svc_mapping() + return self._queue_svc_map.get(qname) + + def _sent(self, task): + if not task.sent: + return + svcid = self._get_svc_from_queue(task.queue) + if svcid is None: + self._log.warning( + "no service for tasks on queue '%s' found", task.queue + ) + else: + with self._metrics as updater: + updater.count_sent(svcid) + + def _succeeded(self, task): + if not task.received or not task.started: + return + svcid = self._get_svc_from_node(task.hostname) + with self._metrics as updater: + updater.mark_success(svcid) + updater.add_task_runtime(svcid, task.name, task.runtime) + _completed(task, svcid, updater) + + def _failed(self, task): + svcid = self._get_svc_from_node(task.hostname) + with self._metrics as updater: + updater.mark_failure(svcid) + _completed(task, svcid, updater) + + def _retried(self, task): + svcid = self._get_svc_from_node(task.hostname) + with self._metrics as updater: + updater.mark_retry(svcid) + _completed(task, svcid, updater) + + +def _completed(task, svcid, metrics): + if not task.sent: + return + leadtime = task.timestamp - task.sent + metrics.count_completed(svcid) + metrics.add_task_leadtime(svcid, task.name, leadtime) diff --git a/Products/Jobber/monitor/inspector.py b/Products/Jobber/monitor/inspector.py new file mode 100644 index 0000000000..50d15391a1 --- /dev/null +++ b/Products/Jobber/monitor/inspector.py @@ -0,0 +1,59 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from collections import defaultdict + +from .logger import getLogger + + +class Inspector(object): + """Just enough API to satisfy collecting metrics from Celery.""" + + def __init__(self, app, timeout=10): + self._app = app + self._timeout = timeout + self._workers = defaultdict(dict) + self._log = getLogger(self) + + def running_counts(self): + inspect = self._app.control.inspect(timeout=self._timeout) + result = inspect.active() + if result is None or "error" in result: + self._log.warning("inspect method 'active' failed: %s", result) + return {} + running = {} + for node, tasks in result.items(): + service = _get_service_from_node(node) + count = running.get(service, 0) + running[service] = count + len(tasks) + return running + + def workers(self): + inspect = self._app.control.inspect(timeout=self._timeout) + result = inspect.active_queues() + if result is None or "error" in result: + self._log.warning( + "inspect method 'active_queues' failed: %s", result + ) + return {} + return { + _get_service_from_node(node): { + "serviceid": _get_serviceid_from_node(node), + "queue": data[0]["name"], + } + for node, data in result.items() + } + + +def _get_serviceid_from_node(node): + return node.split("@")[1] + + +def _get_service_from_node(node): + return node.split("@")[0].split("-")[0] diff --git a/Products/Jobber/monitor/logger.py b/Products/Jobber/monitor/logger.py new file mode 100644 index 0000000000..42f59a767b --- /dev/null +++ b/Products/Jobber/monitor/logger.py @@ -0,0 +1,73 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import copy +import logging +import logging.config + + +def getLogger(obj): + return logging.getLogger( + "zen.zenjobs.monitor.{}".format( + type(obj).__module__.split(".")[-1].lower() + ) + ) + + +def configure_logging(level=None, filename=None, maxcount=None, maxsize=None): + config = copy.deepcopy(_logging_config) + common_handler = config["handlers"]["default"] + common_handler.update( + { + "filename": filename, + "maxBytes": maxsize, + "backupCount": maxcount, + } + ) + config["loggers"]["zen.zenjobs.monitor"]["level"] = level.upper() + logging.config.dictConfig(config) + + +_logging_config = { + "version": 1, + "disable_existing_loggers": True, + "formatters": { + "default": { + "format": ( + "%(asctime)s.%(msecs).0f %(levelname)s %(name)s: %(message)s" + ), + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + }, + "handlers": { + "default": { + "formatter": "default", + "class": "cloghandler.ConcurrentRotatingFileHandler", + "filename": None, + "maxBytes": None, + "backupCount": None, + "mode": "a", + "filters": [], + }, + }, + "loggers": { + "zen": { + "level": "INFO", + "handlers": ["default"], + }, + "zen.zenjobs.monitor": { + "level": "INFO", + }, + }, + "root": { + "handlers": [], + }, +} diff --git a/Products/Jobber/monitor/metrics.py b/Products/Jobber/monitor/metrics.py new file mode 100644 index 0000000000..5d6ba4127f --- /dev/null +++ b/Products/Jobber/monitor/metrics.py @@ -0,0 +1,225 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from collections import defaultdict +from threading import RLock + +from metrology.instruments import HistogramUniform, Meter +from metrology.instruments.gauge import PercentGauge + + +class ZenJobsMetrics(object): + def __init__(self): + self._lock = RLock() # synchronize thread access + self._metrics = _BagOfMetrics() + + def __enter__(self): + self._lock.acquire() + return self._metrics + + def __exit__(self, *exc_info): + self._lock.release() + + def report(self): + with self._lock: + cycletime = _get_timings( + self._metrics.cycletime, + self._metrics.cycletime_task, + self._metrics.cycletime_service, + self._metrics.cycletime_service_task, + ) + leadtime = _get_timings( + self._metrics.leadtime, + self._metrics.leadtime_task, + self._metrics.leadtime_service, + self._metrics.leadtime_service_task, + ) + + results = { + service: { + "success_rate": self._metrics.successes[service].mean_rate, + "success_percent": self._metrics.success_pct[ + service + ].value, + "retry_rate": self._metrics.retries[service].mean_rate, + "retry_percent": self._metrics.retry_pct[service].value, + "failure_rate": self._metrics.failures[service].mean_rate, + "failure_percent": self._metrics.failure_pct[ + service + ].value, + } + for service in self._metrics.services + } + + return { + "cycletime": cycletime, + "leadtime": leadtime, + "results": results, + } + + +class _BagOfMetrics(object): + def __init__(self): + # cache of service IDs + self.services = set() + + # Task runtimes; + # {service-id: {task-name: histogram}} + self.cycletime_service_task = {} + # {task-name: histogram} + self.cycletime_task = {} + # {service-id: histogram} + self.cycletime_service = {} + # All tasks on all services + self.cycletime = HistogramUniform() + + # Total lifetime of tasks; + # {service-id: {task-name: histogram}} + self.leadtime_service_task = {} + # {task-name: histogram} + self.leadtime_task = {} + # {service-id: histogram} + self.leadtime_service = {} + # All tasks on all services + self.leadtime = HistogramUniform() + + # Task run rates + # {service-id: meter} + self.failures = defaultdict(Meter) + self.retries = defaultdict(Meter) + self.successes = defaultdict(Meter) + self.completed = defaultdict(Meter) + + # Percentages by service + # {service-id: PercentGauge} + self.success_pct = PercentMetricsGroup(self.successes, self.completed) + self.failure_pct = PercentMetricsGroup(self.failures, self.completed) + self.retry_pct = PercentMetricsGroup(self.retries, self.completed) + + def add_task_runtime(self, service, task, runtime): + millisecs = int(runtime * 1000) + + if service not in self.cycletime_service_task: + self.cycletime_service_task[service] = {} + if task not in self.cycletime_service_task[service]: + self.cycletime_service_task[service][task] = HistogramUniform() + self.cycletime_service_task[service][task].update(millisecs) + + if task not in self.cycletime_task: + self.cycletime_task[task] = HistogramUniform() + self.cycletime_task[task].update(millisecs) + + if service not in self.cycletime_service: + self.cycletime_service[service] = HistogramUniform() + self.cycletime_service[service].update(millisecs) + + self.cycletime.update(millisecs) + self.services.add(service) + + def add_task_leadtime(self, service, task, leadtime): + millisecs = int(leadtime * 1000) + + if service not in self.leadtime_service_task: + self.leadtime_service_task[service] = {} + if 
task not in self.leadtime_service_task[service]: + self.leadtime_service_task[service][task] = HistogramUniform() + self.leadtime_service_task[service][task].update(millisecs) + + if task not in self.leadtime_task: + self.leadtime_task[task] = HistogramUniform() + self.leadtime_task[task].update(millisecs) + + if service not in self.leadtime_service: + self.leadtime_service[service] = HistogramUniform() + self.leadtime_service[service].update(millisecs) + + self.leadtime.update(millisecs) + self.services.add(service) + + def count_sent(self, service): + self.services.add(service) + + def count_completed(self, service): + self.completed[service].mark() + self.services.add(service) + + def mark_success(self, service): + self.successes[service].mark() + self.services.add(service) + + def mark_retry(self, service): + self.retries[service].mark() + self.services.add(service) + + def mark_failure(self, service): + self.failures[service].mark() + self.services.add(service) + + +def _get_timings(total, bytask, byservice, byservicetask): + return { + "min": total.min, + "mean": total.mean, + "max": total.max, + "tasks": { + task: {"min": metric.min, "mean": metric.mean, "max": metric.max} + for task, metric in bytask.iteritems() + }, + "services": { + service: { + "min": metric.min, + "mean": metric.mean, + "max": metric.max, + "tasks": { + task: { + "min": metric.min, + "mean": metric.mean, + "max": metric.max, + } + for task, metric in byservicetask.get( + service, {} + ).iteritems() + }, + } + for service, metric in byservice.iteritems() + }, + } + + +class PercentMetricsGroup(object): + def __init__(self, numerators, denominators): + self._nums = numerators + self._dens = denominators + self._metrics = {} + + def get(self, name, default=None): + metric = self._metrics.get(name) + if metric is None: + metric = _TwoCountersGauge(self._nums[name], self._dens[name]) + self._metrics[name] = metric + return metric + + def __getitem__(self, key): + return self.get(key) + + +class _TwoCountersGauge(PercentGauge): + def __init__(self, numerator, denominator): + self._num = numerator + self._den = denominator + + # @override + def numerator(self): + return self._num.count + + # @override + def denominator(self): + return self._den.count diff --git a/Products/Jobber/monitor/reporter.py b/Products/Jobber/monitor/reporter.py new file mode 100644 index 0000000000..68e97336de --- /dev/null +++ b/Products/Jobber/monitor/reporter.py @@ -0,0 +1,100 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +import contextlib +import json + +import attr +import requests + +from attr.validators import instance_of, deep_mapping + +from Products.ZenUtils.controlplane import configuration as cc_config +from Products.ZenUtils.MetricReporter import DEFAULT_METRIC_URL + +from .logger import getLogger + + +class MetricsReporter(object): + def __init__(self, url=None, prefix=""): + if not url: + url = cc_config.consumer_url + if not url: + url = DEFAULT_METRIC_URL + self._url = url + self._log = getLogger(self) + + @contextlib.contextmanager + def session(self, tags=None): + session = _Session(tags if tags is not None else {}) + try: + yield session + except Exception: + self._log.exception("metrics reporting session failed") + else: + self._post(session.metrics) + + def _post(self, metrics): + if not metrics: + return + session = requests.Session() + session.headers.update( + { + "Content-Type": "application/json", + "User-Agent": "Zenoss Service Metrics", + } + ) + body = {"metrics": [attr.asdict(sample) for sample in metrics]} + self._log.debug("sending metric payload: %s", body) + response = session.post(self._url, data=json.dumps(body)) + if response.status_code != 200: + self._log.warning( + "problem submitting metrics: %s, %s", + response.status_code, + response.text.replace("\n", "\\n"), + ) + else: + self._log.debug("%s metrics posted", len(metrics)) + + def build_metric(self, **kw): + return Metric(**kw) + + +class _Session(object): + def __init__(self, tags): + self._tags = tags + self.metrics = [] + + def add(self, metric, value, timestamp, tags=None): + tags = tags if tags is not None else {} + tags.update(self._tags) + self.metrics.append( + Metric(metric=metric, value=value, timestamp=timestamp, tags=tags) + ) + + +@attr.s(frozen=True, slots=True) +class Metric(object): + metric = attr.ib(converter=str) + value = attr.ib(converter=float) + timestamp = attr.ib(validator=instance_of(float)) + tags = attr.ib( + validator=deep_mapping( + key_validator=instance_of(str), + value_validator=instance_of(str), + mapping_validator=instance_of(dict), + ) + ) + + @tags.validator + def _verify_keys(self, attribute, value): + if "serviceId" not in value: + raise KeyError("Missing 'serviceId' tag") + if "tenantId" not in value: + raise KeyError("Missing 'tenantId' tag") diff --git a/Products/Jobber/scheduler.py b/Products/Jobber/scheduler.py index f3eeae9ce1..a6e8aabaf7 100644 --- a/Products/Jobber/scheduler.py +++ b/Products/Jobber/scheduler.py @@ -21,7 +21,7 @@ from celery.beat import Scheduler from celery.schedules import crontab -from .config import ZenJobs, Celery +from .config import getConfig, ZenCeleryConfig class ZenJobsScheduler(Scheduler): @@ -120,7 +120,7 @@ def schedule(self): @property def info(self): return " . 
schedule-file -> {}".format( - ZenJobs.get("scheduler-config-file"), + getConfig().get("scheduler-config-file"), ) def sync(self): @@ -142,7 +142,7 @@ def close(self): def load_schedule(): - configfile = ZenJobs.get("scheduler-config-file") + configfile = getConfig().get("scheduler-config-file") with open(configfile, "r") as f: raw = yaml.load(f, Loader=yaml.loader.SafeLoader) parsed_schedule = {} @@ -222,7 +222,7 @@ def _key(name): def _getClient(): """Create and return the ZenJobs JobStore client.""" - return redis.StrictRedis.from_url(Celery.CELERY_RESULT_BACKEND) + return redis.StrictRedis.from_url(ZenCeleryConfig.result_backend) def handle_beat_init(*args, **kw): diff --git a/Products/Jobber/serialization.py b/Products/Jobber/serialization.py index 3e3b400271..844911827b 100644 --- a/Products/Jobber/serialization.py +++ b/Products/Jobber/serialization.py @@ -9,6 +9,8 @@ from __future__ import absolute_import, print_function +import six + from json import ( loads as json_loads, dumps as json_dumps, @@ -25,7 +27,7 @@ def _process_list(seq): while stack: lst = stack.pop() for idx, item in enumerate(lst): - if isinstance(item, unicode): + if isinstance(item, six.text_type): lst[idx] = str(item) if isinstance(item, list): stack.append(item) @@ -37,7 +39,7 @@ def _decode_hook(*args, **kw): for n, i in enumerate(args): for pair in i: k, v = pair - if isinstance(v, unicode): + if isinstance(v, six.text_type): v = str(v) elif isinstance(v, list): v = _process_list(v) diff --git a/Products/Jobber/signals.zcml b/Products/Jobber/signals.zcml index 716ac4e83b..6166c9a65d 100644 --- a/Products/Jobber/signals.zcml +++ b/Products/Jobber/signals.zcml @@ -1,8 +1,20 @@ + + + + + + + + = 4.0 - # userid = getattr(self.request, "userid", None) - userid = self.request.headers.get("userid") + # Celery < 4.0 had a 'headers' attribute + headers = getattr(self.request, "headers", None) + if headers is not None: + userid = headers.get("userid") + else: + userid = getattr(self.request, "userid", None) with zodb(self.app.db, userid, self.log) as dmd: self.__dmd = dmd try: @@ -65,13 +74,17 @@ def __call__(self, *args, **kwargs): def __retry_on_conflict(self, *args, **kw): try: result = self.__run(*args, **kw) - transaction.commit() - self.log.debug("Transaction committed") + if not self.dmd_read_only: + transaction.commit() + self.log.debug("Transaction committed") + else: + transaction.abort() + self.log.debug("Transaction aborted reason=read-only-task") return result except (ReadConflictError, ConflictError) as ex: transaction.abort() self.log.warn("Transaction aborted reason=%s", ex) - limit = ZenJobs.get("zodb-retry-interval-limit", 30) + limit = getConfig().get("zodb-retry-interval-limit", 30) duration = int(SystemRandom().uniform(1, limit)) self.log.info( "Reschedule task to execute after %s seconds.", @@ -95,7 +108,7 @@ def zodb(db, userid, log): :param db: ZODB database connection. :param str userid: The ID of the user to authenticate with. 
""" - session = db.open() + session = db.open() # type: ZODB.Connection try: mlog.debug("Started ZODB session") root = session.root() diff --git a/Products/Jobber/task/event.py b/Products/Jobber/task/event.py index d5896ddd7b..4af174d300 100644 --- a/Products/Jobber/task/event.py +++ b/Products/Jobber/task/event.py @@ -41,7 +41,7 @@ def on_failure(self, exc, task_id, args, kwargs, einfo): def _send_event(task, exc, task_id, args, kwargs): - classkey, summary = _getErrorInfo(task.app, exc) + classkey, summary = _getErrorInfo(task, exc) name = task.getJobType() if hasattr(task, "getJobType") else task.name publisher = getUtility(IEventPublisher) event = Event.Event( @@ -67,21 +67,20 @@ def _send_event(task, exc, task_id, args, kwargs): mlog.info(*log_message) -def _getTimeoutSummary(app, ex): - return "Job killed after {}.".format( - humanize_timedelta( - timedelta( - seconds=app.conf.get("CELERYD_TASK_SOFT_TIME_LIMIT"), - ), - ), +def _getTimeoutSummary(task, ex): + _, soft_limit = task.request.timelimit or (None, None) + if soft_limit is None: + soft_limit = task.app.conf.get("task_soft_time_limit") + return "Job timed out after {}.".format( + humanize_timedelta(timedelta(seconds=soft_limit)) ) -def _getAbortedSummary(app, ex): +def _getAbortedSummary(task, ex): return "Job aborted by user" -def _getErrorSummary(app, ex): +def _getErrorSummary(task, ex): return "{0.__class__.__name__}: {0}".format(ex) @@ -91,9 +90,9 @@ def _getErrorSummary(app, ex): } -def _getErrorInfo(app, ex): +def _getErrorInfo(task, ex): """Returns (eventkey, summary).""" key, summary_fn = _error_eventkey_map.get( type(ex).__name__, ("zenjobs-failure", _getErrorSummary) ) - return key, summary_fn(app, ex) + return key, summary_fn(task, ex) diff --git a/Products/Jobber/task/utils.py b/Products/Jobber/task/utils.py index 9307d40bbf..1968760234 100644 --- a/Products/Jobber/task/utils.py +++ b/Products/Jobber/task/utils.py @@ -11,10 +11,11 @@ import inspect +from itertools import chain + from zope.component import getUtility from ..interfaces import IJobStore -from ..zenjobs import app def requires(*features): @@ -24,6 +25,8 @@ def requires(*features): (*features, ZenTask, celery.app.task.Task, object) where 'features' are the classes given to this function. """ + from ..zenjobs import app + bases = tuple(features) + (app.Task, object) culled = [] for feature in reversed(bases): @@ -31,7 +34,12 @@ def requires(*features): if cls not in culled: culled.insert(0, cls) name = "".join(t.__name__ for t in features) + "Task" - basetask = type(name, tuple(culled), {"abstract": True}) + throws = set( + chain.from_iterable(getattr(cls, "throws", ()) for cls in culled) + ) + basetask = type( + name, tuple(culled), {"abstract": True, "throws": tuple(throws)} + ) return basetask diff --git a/Products/Jobber/tests/test_dmd.py b/Products/Jobber/tests/test_dmd.py new file mode 100644 index 0000000000..72785e35a1 --- /dev/null +++ b/Products/Jobber/tests/test_dmd.py @@ -0,0 +1,77 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from unittest import TestCase + +from mock import MagicMock, patch + +from ..task import requires, DMD +from ..zenjobs import app + + +class DMDTest(TestCase): + """Test the DMD mixin class.""" + + def setUp(self): + log = logging.getLogger() + log.setLevel(logging.FATAL + 1) + + def tearDown(self): + log = logging.getLogger() + log.setLevel(logging.NOTSET) + + @app.task(bind=True, base=requires(DMD)) + def dmd_task_rw(self): + pass + + @app.task(bind=True, base=requires(DMD), dmd_read_only=True) + def dmd_task_ro(self): + pass + + def test_rw_defaults(t): + t.assertIsInstance(t.dmd_task_rw, DMD) + t.assertFalse(t.dmd_task_rw.dmd_read_only) + t.assertIsNone(t.dmd_task_rw.dmd) + + def test_ro_defaults(t): + t.assertIsInstance(t.dmd_task_ro, DMD) + t.assertTrue(t.dmd_task_ro.dmd_read_only) + t.assertIsNone(t.dmd_task_ro.dmd) + + @patch("Products.Jobber.task.dmd.transaction") + def test_rw(t, transaction_): + db = MyMagicMock() + app.db = db + try: + t.dmd_task_rw() + transaction_.abort.assert_not_called() + transaction_.commit.assert_called_with() + finally: + del app.db + + @patch("Products.Jobber.task.dmd.transaction") + def test_ro(t, transaction_): + db = MyMagicMock() + app.db = db + try: + t.dmd_task_ro() + transaction_.commit.assert_not_called() + transaction_.abort.assert_called_with() + finally: + del app.db + + +class MyMagicMock(MagicMock): + + def __of__(self, *args, **kw): + return self diff --git a/Products/Jobber/tests/test_jobstore_updates.py b/Products/Jobber/tests/test_jobstore_updates.py index e7dc549f38..f92941eb91 100644 --- a/Products/Jobber/tests/test_jobstore_updates.py +++ b/Products/Jobber/tests/test_jobstore_updates.py @@ -19,7 +19,6 @@ from zope.component import getGlobalSiteManager from ..model import ( - app, IJobStore, job_start, job_end, @@ -29,6 +28,7 @@ ) from ..storage import JobStore from .utils import subTest, RedisLayer +from ..zenjobs import app UNEXPECTED = type("UNEXPECTED", (object,), {})() PATH = {"src": "Products.Jobber.model"} @@ -60,7 +60,6 @@ def setUp(t): rootLogger.handlers = [] def tearDown(t): - t.layer.redis.flushall() getGlobalSiteManager().unregisterUtility( t.store, IJobStore, name="redis" ) @@ -127,7 +126,7 @@ def test_unknown_task_id(t): job_start("1") @patch("{src}.time".format(**PATH), autospec=True) - @patch("{src}.app.backend".format(**PATH), autospec=True) + @patch("Products.Jobber.zenjobs.app.backend", autospec=True) def test_success(t, _backend, _time): tm = 1597059131.762538 @@ -166,7 +165,7 @@ def test_unknown_task_id(t): job_start("1") @patch("{src}.time".format(**PATH), autospec=True) - @patch("{src}.app.backend".format(**PATH), autospec=True) + @patch("Products.Jobber.zenjobs.app.backend", autospec=True) def test_aborted(t, _backend, _time): tm = 1597059131.762538 @@ -188,7 +187,7 @@ def test_aborted(t, _backend, _time): t.assertEqual(expected_finished, finished) @patch("{src}.time".format(**PATH), autospec=True) - @patch("{src}.app.backend".format(**PATH), autospec=True) + @patch("Products.Jobber.zenjobs.app.backend", autospec=True) def test_failure(t, _backend, _time): tm = 1597059131.762538 @@ -210,7 +209,7 @@ def test_failure(t, _backend, _time): t.assertEqual(expected_finished, finished) @patch("{src}.time".format(**PATH), autospec=True) - @patch("{src}.app.backend".format(**PATH), autospec=True) + @patch("Products.Jobber.zenjobs.app.backend", autospec=True) 
def test_callbacks_are_aborted(t, _backend, _time): next_jobid = "456" next_job = dict(t.initial) @@ -258,7 +257,7 @@ def test_unknown_task_id(t): req = type("request", (object,), {"id": "1"})() job_retry(req) - @patch("{src}.app.backend".format(**PATH), autospec=True) + @patch("Products.Jobber.zenjobs.app.backend", autospec=True) def test_nominal(t, _backend): tm = 1597059131.762538 req = type("request", (object,), {"id": t.jobid})() @@ -350,7 +349,6 @@ def setUp(t): ) def tearDown(t): - t.layer.redis.flushall() getGlobalSiteManager().unregisterUtility( t.store, IJobStore, name="redis" ) diff --git a/Products/Jobber/tests/test_log.py b/Products/Jobber/tests/test_log.py index a00f71d113..b326b39816 100644 --- a/Products/Jobber/tests/test_log.py +++ b/Products/Jobber/tests/test_log.py @@ -20,7 +20,7 @@ apply_levels, configure_logging, load_log_level_config, - _loglevelconf_filepath, + _loglevel_confs, ) from .utils import LoggingLayer @@ -39,10 +39,8 @@ class ConfigureLoggingTest(TestCase): @patch("{src}.LoggingProxy".format(**PATH), autospec=True) @patch("{src}.apply_levels".format(**PATH), autospec=True) @patch("{src}.load_log_level_config".format(**PATH), autospec=True) - @patch("{src}.get_default_config".format(**PATH), autospec=True) def test_nominal( t, - _get_default_config, _load_log_level_config, _apply_levels, _LoggingProxy, @@ -52,7 +50,6 @@ def test_nominal( _sys, _os, ): - dictConfig = _logging.config.dictConfig exists = _os.path.exists getLogger = _logging.getLogger levelConfig = _load_log_level_config.return_value @@ -76,11 +73,11 @@ def test_nominal( exists.return_value = True - configure_logging() + configure_logging("zenjobs") - dictConfig.assert_called_once_with(_get_default_config.return_value) - exists.assert_called_once_with(_loglevelconf_filepath) - _load_log_level_config.assert_called_once_with(_loglevelconf_filepath) + loglevel_confname = _loglevel_confs["zenjobs"] + exists.assert_called_once_with(loglevel_confname) + _load_log_level_config.assert_called_once_with(loglevel_confname) _apply_levels.assert_called_once_with(levelConfig) getLogger.assert_has_calls(getLogger_calls, any_order=True) @@ -99,10 +96,8 @@ def test_nominal( @patch("{src}.LoggingProxy".format(**PATH), autospec=True) @patch("{src}.apply_levels".format(**PATH), autospec=True) @patch("{src}.load_log_level_config".format(**PATH), autospec=True) - @patch("{src}.get_default_config".format(**PATH), autospec=True) def test_missing_loglevel_file( t, - _get_default_config, _load_log_level_config, _apply_levels, _LoggingProxy, @@ -112,7 +107,6 @@ def test_missing_loglevel_file( _sys, _os, ): - dictConfig = _logging.config.dictConfig exists = _os.path.exists getLogger = _logging.getLogger logs = { @@ -135,10 +129,9 @@ def test_missing_loglevel_file( exists.return_value = False - configure_logging() + configure_logging("zenjobs") - dictConfig.assert_called_once_with(_get_default_config.return_value) - exists.assert_called_once_with(_loglevelconf_filepath) + exists.assert_called_once_with(_loglevel_confs["zenjobs"]) _load_log_level_config.assert_has_calls([]) _apply_levels.assert_has_calls([]) diff --git a/Products/Jobber/tests/test_manager.py b/Products/Jobber/tests/test_manager.py index 40081d2a7d..70af4d3467 100644 --- a/Products/Jobber/tests/test_manager.py +++ b/Products/Jobber/tests/test_manager.py @@ -49,7 +49,6 @@ def setUp(t): ) def tearDown(t): - t.layer.redis.flushall() getGlobalSiteManager().unregisterUtility( t.store, IJobStore, name="redis" ) @@ -139,7 +138,9 @@ def test_query_return_value(t): 
def test_getUnfinishedJobs_all_types(t): expected = [] - for idx, st in enumerate(states.ALL_STATES): + # in celery 4.4.7 REJECTED was added to UNREADY_STATES (only used in events) + # but it wasn't included to ALL_STATES + for idx, st in enumerate(states.ALL_STATES | states.UNREADY_STATES): rec = dict(t.full, status=st, jobid="abc-{}".format(idx)) t.store[rec["jobid"]] = rec if st in states.UNREADY_STATES: diff --git a/Products/Jobber/tests/test_redisrecord.py b/Products/Jobber/tests/test_redisrecord.py index 3793b3819b..c753c090b8 100644 --- a/Products/Jobber/tests/test_redisrecord.py +++ b/Products/Jobber/tests/test_redisrecord.py @@ -140,25 +140,15 @@ def test_from_signature_with_custom_description(t): def test_from_signal(t): userid = "blink" t.expected["userid"] = userid - body = { - "id": t.jobid, - "task": t.task.name, - "args": t.args, - "kwargs": t.kw, - } - headers = {"userid": userid} + body = (t.args, t.kw, {}) + headers = {"userid": userid, "task": t.task.name, "id": t.jobid} properties = {} actual = RedisRecord.from_signal(body, headers, properties) t.assertDictEqual(t.expected, actual) def test_from_signal_with_details(t): - body = { - "id": t.jobid, - "task": t.task.name, - "args": t.args, - "kwargs": t.kw, - } - headers = {} + body = (t.args, t.kw, {}) + headers = {"id": t.jobid, "task": t.task.name} properties = {"a": 1, "b": 2} t.expected["details"] = properties actual = RedisRecord.from_signal(body, headers, properties) @@ -169,6 +159,7 @@ class BuildRedisRecordFromJobTest(BaseBuildRedisRecord, TestCase): """Test the RedisRecord class with a Job.""" class TestJob(Job): + name = "TestJob" @classmethod def getJobType(cls): return "Test Job" @@ -177,6 +168,9 @@ def getJobType(cls): def getJobDescription(cls, *args, **kw): return "TestJob %s %s" % (args, kw) + from Products.Jobber.zenjobs import app + app.register_task(TestJob) + def setUp(t): t.task = t.TestJob() BaseBuildRedisRecord.setUp(t) diff --git a/Products/Jobber/tests/test_storage.py b/Products/Jobber/tests/test_storage.py index 946ce9e474..d1aef54c66 100644 --- a/Products/Jobber/tests/test_storage.py +++ b/Products/Jobber/tests/test_storage.py @@ -95,7 +95,7 @@ def setUp(t): t.store = JobStore(t.layer.redis) def tearDown(t): - t.layer.redis.flushall() + del t.store def test_keys(t): t.assertIsInstance(t.store.keys(), collections.Iterable) @@ -228,7 +228,7 @@ def setUp(t): t.store = JobStore(t.layer.redis) def tearDown(t): - t.layer.redis.flushall() + del t.store def test___setitem__full(t): t.store[t.full["jobid"]] = t.full @@ -342,7 +342,7 @@ def setUp(t): t.store[t.jobid] = t.initial def tearDown(t): - t.layer.redis.flushall() + del t.store def test_ttl_initial(t): ttl = t.store.ttl(t.jobid) @@ -438,7 +438,7 @@ def setUp(t): t.layer.redis.hmset("zenjobs:job:%s" % jobid, data) def tearDown(t): - t.layer.redis.flushall() + del t.store def test_keys(t): t.assertIsInstance(t.store.keys(), collections.Iterable) diff --git a/Products/Jobber/tests/test_task_utils.py b/Products/Jobber/tests/test_task_utils.py index dcfc8e27fd..d90385a4f5 100644 --- a/Products/Jobber/tests/test_task_utils.py +++ b/Products/Jobber/tests/test_task_utils.py @@ -47,7 +47,6 @@ def setUp(t): ) def tearDown(t): - t.layer.redis.flushall() getGlobalSiteManager().unregisterUtility( t.store, IJobStore, name="redis" ) diff --git a/Products/Jobber/tests/test_worker.py b/Products/Jobber/tests/test_worker.py new file mode 100644 index 0000000000..bb74a906dc --- /dev/null +++ b/Products/Jobber/tests/test_worker.py @@ -0,0 +1,110 @@ 
+############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from unittest import TestCase + +from mock import call, Mock, patch + +from Products.Jobber.worker import ( + MySQLdb, + setup_zodb, + _OPERATIONAL_ERROR_RETRY_DELAY, +) + +PATH = {"src": "Products.Jobber.worker"} + + +class TestSetupZODB(TestCase): + """Test the setup_zodb function.""" + + def setUp(t): + log = logging.getLogger() + log.setLevel(logging.FATAL + 1) + + def tearDown(t): + log = logging.getLogger() + log.setLevel(logging.NOTSET) + + @patch("{src}.ZODB".format(**PATH), autospec=True) + @patch("{src}.get_app".format(**PATH), autospec=True) + @patch("{src}.getConfig".format(**PATH), autospec=True) + def test_nominal(t, getConfig_, get_app_, zodb_): + db = Mock() + app = Mock() + filename = "config/zodb.conf" + config = {"zodb-config-file": filename} + zodb_.config.databaseFromURL.return_value = db + get_app_.return_value = app + getConfig_.return_value = config + + setup_zodb() + + t.assertTrue(hasattr(app, "db")) + t.assertEqual(app.db, db) + zodb_.config.databaseFromURL.assert_called_with("file://" + filename) + + @patch("{src}.ZODB".format(**PATH), autospec=True) + @patch("{src}.get_app".format(**PATH), autospec=True) + @patch("{src}.getConfig".format(**PATH), autospec=True) + @patch("{src}.time".format(**PATH), autospec=True) + def test_operational_error(t, time_, getConfig_, get_app_, zodb_): + timeout = 100 + appconfig = {"worker_proc_alive_timeout": timeout} + app = Mock() + app.conf = appconfig + get_app_.return_value = app + + ex = MySQLdb.OperationalError() + zodb_.config.databaseFromURL.side_effect = ex + + filename = "config/zodb.conf" + config = {"zodb-config-file": filename} + getConfig_.return_value = config + + sleep_calls = ( + call(_OPERATIONAL_ERROR_RETRY_DELAY), + call(_OPERATIONAL_ERROR_RETRY_DELAY), + call(_OPERATIONAL_ERROR_RETRY_DELAY), + ) + + with t.assertRaises(SystemExit): + setup_zodb() + + time_.sleep.assert_has_calls(sleep_calls) + t.assertEqual( + len(sleep_calls), zodb_.config.databaseFromURL.call_count + ) + t.assertEqual(len(sleep_calls), time_.sleep.call_count) + + @patch("{src}.ZODB".format(**PATH), autospec=True) + @patch("{src}.get_app".format(**PATH), autospec=True) + @patch("{src}.getConfig".format(**PATH), autospec=True) + @patch("{src}.time".format(**PATH), autospec=True) + def test_unexpected_error(t, time_, getConfig_, get_app_, zodb_): + timeout = 100 + appconfig = {"worker_proc_alive_timeout": timeout} + app = Mock() + app.conf = appconfig + get_app_.return_value = app + + ex = Exception() + zodb_.config.databaseFromURL.side_effect = ex + + filename = "config/zodb.conf" + config = {"zodb-config-file": filename} + getConfig_.return_value = config + + with t.assertRaises(SystemExit): + setup_zodb() + + t.assertEqual(1, zodb_.config.databaseFromURL.call_count) diff --git a/Products/Jobber/tests/test_zentask.py b/Products/Jobber/tests/test_zentask.py index 3ffc10578a..770358ac5e 100644 --- a/Products/Jobber/tests/test_zentask.py +++ b/Products/Jobber/tests/test_zentask.py @@ -103,6 +103,6 @@ def test_subtask(t, _uuid): "headers": {"userid": None}, "task_id": task_id, } - task = 
t.simple_task.subtask() + task = t.simple_task.signature() t.assertIsInstance(task, Signature) t.assertDictEqual(expected, task.options) diff --git a/Products/Jobber/tests/utils.py b/Products/Jobber/tests/utils.py index 9411701824..cecfa986e6 100644 --- a/Products/Jobber/tests/utils.py +++ b/Products/Jobber/tests/utils.py @@ -48,12 +48,21 @@ class RedisLayer(object): @classmethod def setUp(cls): + pass + + @classmethod + def tearDown(cls): + pass + + @classmethod + def testSetUp(cls): parsed = urlparse(getRedisUrl()) url = "redis://{0}/{1}".format(parsed.netloc, cls.db) cls.redis = getRedisClient(url) @classmethod - def tearDown(cls): + def testTearDown(cls): + cls.redis.flushdb() del cls.redis diff --git a/Products/Jobber/utils/app.py b/Products/Jobber/utils/app.py new file mode 100644 index 0000000000..d299d1c8a0 --- /dev/null +++ b/Products/Jobber/utils/app.py @@ -0,0 +1,16 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + + +def get_app(): + from ..zenjobs import app + + return app diff --git a/Products/Jobber/utils/log.py b/Products/Jobber/utils/log.py index f937a5d8f9..d0d4b50768 100644 --- a/Products/Jobber/utils/log.py +++ b/Products/Jobber/utils/log.py @@ -11,14 +11,14 @@ import inspect import logging -import logging.config -import logging.handlers import os import sys from functools import wraps -from celery.app import current_task +import six + +from celery._state import get_current_task from celery.utils.log import ( LoggingProxy as _LoggingProxy, logger_isa as _logger_isa, @@ -152,7 +152,7 @@ def __init__(self, base=None, task=None, datefmt=None): super(TaskFormatter, self).__init__(datefmt=datefmt) def format(self, record): # noqa: A003 - task = current_task() + task = get_current_task() if task and task.request: self._fmt = self._task record.__dict__.update( @@ -236,7 +236,7 @@ def __init__(self, log=None, adapter=None, aschild=True): if not isinstance(baselog, logging.getLoggerClass()): raise TypeError("'log' callable does produce a logger") self.baselog = baselog - elif isinstance(log, basestring): + elif isinstance(log, six.string_types): self.baselog = logging.getLogger(log) else: raise TypeError( diff --git a/Products/Jobber/utils/utils.py b/Products/Jobber/utils/utils.py new file mode 100644 index 0000000000..7ed5e582ec --- /dev/null +++ b/Products/Jobber/utils/utils.py @@ -0,0 +1,32 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## +from inspect import getargspec + + +def fun_takes_kwargs(fun, kwlist=[]): + """With a function, and a list of keyword arguments, returns arguments + in the list which the function takes. + If the object has an `argspec` attribute that is used instead + of using the :meth:`inspect.getargspec` introspection. + :param fun: The function to inspect arguments of. + :param kwlist: The list of keyword arguments. 
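+    :return: The keyword argument names from `kwlist` that `fun` accepts;
+        if `fun` declares `**kwargs`, the whole `kwlist` is returned.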
+ Examples + >>> def foo(self, x, y, logfile=None, loglevel=None): + ... return x * y + >>> fun_takes_kwargs(foo, ['logfile', 'loglevel', 'task_id']) + ['logfile', 'loglevel'] + >>> def foo(self, x, y, **kwargs): + >>> fun_takes_kwargs(foo, ['logfile', 'loglevel', 'task_id']) + ['logfile', 'loglevel', 'task_id'] + """ + + S = getattr(fun, 'argspec', getargspec(fun)) + if S.keywords is not None: + return kwlist + return [kw for kw in kwlist if kw in S.args] diff --git a/Products/Jobber/worker.py b/Products/Jobber/worker.py index c1db2aa7db..4fe74ea130 100644 --- a/Products/Jobber/worker.py +++ b/Products/Jobber/worker.py @@ -1,6 +1,6 @@ ############################################################################## # -# Copyright (C) Zenoss, Inc. 2019, all rights reserved. +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. @@ -9,24 +9,126 @@ from __future__ import absolute_import +import importlib +import itertools import logging +import time + +import MySQLdb +import pathlib2 as pathlib +import Products import ZODB.config -from .config import ZenJobs -from .zenjobs import app +from Zope2.App import zcml + +from .config import getConfig +from .utils.app import get_app + +_OPERATIONAL_ERROR_RETRY_DELAY = 0.5 +_mlog = logging.getLogger("zen.zenjobs.worker") + + +def initialize_zenoss_env(**kw): + start = time.time() + + from OFS.Application import import_products + from Zope2.App import zcml + + import Products.ZenWidgets + + from Products.ZenUtils.Utils import load_config_override + from Products.ZenUtils.zenpackload import load_zenpacks + + import_products() + + # The Zenoss environment requires that the 'zenoss.zenpacks' entrypoints + # be explicitely loaded because celery doesn't know to do that. + # Not loading those entrypoints means that celery will be unaware of + # any celery 'task' definitions in the ZenPacks. + load_zenpacks() + + zcml.load_site() + load_config_override("scriptmessaging.zcml", Products.ZenWidgets) + + _mlog.getChild("initialize_zenoss_env").info( + "Zenoss environment initialized (%.2f sec elapsed)" + % (time.time() - start) + ) + + +def register_tasks(**kw): + # defer import ZenPacks until here because it doesn't exist during + # an image build. + import ZenPacks + + search_paths = tuple( + pathlib.Path(p) + for p in itertools.chain(Products.__path__, ZenPacks.__path__) + ) + zcml_files = ( + fn for path in search_paths for fn in path.rglob("**/jobs.zcml") + ) + for fn in zcml_files: + root = next( + ( + p + for p in search_paths + if fn.as_posix().startswith(p.as_posix()) + ), + None, + ) + if root is None: + continue + modroot = len(root.parts) - 1 + modname = ".".join(fn.parent.parts[modroot:]) + module = importlib.import_module(modname) + zcml.load_config(fn.name, module) + -mlog = logging.getLogger("zen.zenjobs.worker") +def report_tasks(**kw): + """Log the tasks Celery knows about.""" + log = _mlog.getChild("report_tasks") + log.info("Registered job classes:") + for taskname in sorted(get_app().tasks.keys()): + log.info(".. 
%s", taskname) def setup_zodb(**kw): """Initialize a ZODB connection.""" - zodbcfg = ZenJobs.get("zodb-config-file") + zodbcfg = getConfig().get("zodb-config-file") url = "file://%s" % zodbcfg - app.db = ZODB.config.databaseFromURL(url) - mlog.getChild("setup_zodb").info("ZODB connection initialized") + app = get_app() + attempt = 0 + log = _mlog.getChild("setup_zodb") + while attempt < 3: + try: + app.db = ZODB.config.databaseFromURL(url) + except MySQLdb.OperationalError as ex: + error = str(ex) + # Sleep for a very short duration. Celery signal handlers + # are given short durations to complete. + time.sleep(_OPERATIONAL_ERROR_RETRY_DELAY) + attempt += 1 + except Exception as ex: + log.exception("unexpected failure") + # To avoid retrying on unexpected errors, set `attempt` to 3 to + # cause the loop to exit on the next iteration to allow the + # "else:" clause to run and cause this worker to exit. + error = str(ex) + attempt = 3 + else: + log.info("ZODB connection initialized") + # Break the loop since the database initialization succeeded. + break + else: + log.error("failed to initialize ZODB connection: %s", error) + raise SystemExit("Unable to initialize ZODB connection") def teardown_zodb(**kw): """Shut down the ZODB connection.""" - app.db.close() - mlog.getChild("teardown_zodb").info("ZODB connection closed") + app = get_app() + db = getattr(app, "db", None) + if db is not None: + db.close() + _mlog.getChild("teardown_zodb").info("ZODB connection closed") diff --git a/Products/Jobber/zenjobs.py b/Products/Jobber/zenjobs.py index 7ab60ca0cd..3a32d0982c 100644 --- a/Products/Jobber/zenjobs.py +++ b/Products/Jobber/zenjobs.py @@ -10,13 +10,12 @@ from __future__ import absolute_import from celery import Celery -from kombu.serialization import register +from kombu import serialization from .serialization import without_unicode - # Register custom serializer -register( +serialization.register( "without-unicode", without_unicode.dump, without_unicode.load, @@ -24,8 +23,14 @@ content_encoding="utf-8", ) -app = Celery( - "zenjobs", - config_source="Products.Jobber.config:Celery", - task_cls="Products.Jobber.task:ZenTask", -) + +def _buildapp(): + app = Celery( + "zenjobs", + task_cls="Products.Jobber.task:ZenTask", + config_source="Products.Jobber.config:ZenCeleryConfig", + ) + return app + + +app = _buildapp() diff --git a/Products/ZenCallHome/CallHomeStatus.py b/Products/ZenCallHome/CallHomeStatus.py index 1918c56818..b23a8fb87e 100644 --- a/Products/ZenCallHome/CallHomeStatus.py +++ b/Products/ZenCallHome/CallHomeStatus.py @@ -45,7 +45,7 @@ def create_redis_client(redis_url): return client def _connected_to_redis(self): - """ ensures we have a connection to redis """ + """ensures we have a connection to redis""" if self._redis_client is None: now = time.time() if ( diff --git a/Products/ZenCallHome/HostData.py b/Products/ZenCallHome/HostData.py index 4dcf4a9cf4..4e5a2733c2 100644 --- a/Products/ZenCallHome/HostData.py +++ b/Products/ZenCallHome/HostData.py @@ -7,44 +7,50 @@ # ############################################################################## - -import string -import os +import logging import math +import os import platform import socket +import string +from functools import total_ordering from subprocess import Popen, PIPE -from Products.ZenCallHome import IHostData, IZenossEnvData -from zope.interface import implements -import logging +from zope.interface import implementer + +from . 
import IHostData, IZenossEnvData + log = logging.getLogger("zen.callhome") -LOCAL_HOSTNAMES = ["localhost", - "localhost.localdomain", - socket.gethostname(), - socket.getfqdn()] +LOCAL_HOSTNAMES = [ + "localhost", + "localhost.localdomain", + socket.gethostname(), + socket.getfqdn(), +] +@implementer(IHostData) class PlatformData(object): - implements(IHostData) - def callHomeData(self): distro = " ".join(platform.linux_distribution()) processor = platform.processor() system = platform.system() release = platform.release() - yield ("OS", - "{distro} {processor} " - "({system} kernel {release})".format(**locals())) + yield ( + "OS", + "{distro} {processor} " + "({system} kernel {release})".format(**locals()), + ) +@implementer(IHostData) class ProcFileData(object): """ Used to gather proc file statistics for call home """ - implements(IHostData) + _proc_file = None _parser = None @@ -80,7 +86,6 @@ def _createParser(self): class ProcFileParser(object): - @classmethod def _parse_key_value(cls, line): if not line.strip(): @@ -89,7 +94,6 @@ def _parse_key_value(cls, line): class CpuinfoParser(ProcFileParser): - def __init__(self): self._processors = [] self._processor = None @@ -112,8 +116,10 @@ def output(self): cores += count dct = dict(tuples) cache_size = convert_kb(dct["cache size"]) - yield ("CPU", - "{dct[model name]} ({cache_size} cache)".format(**locals())) + yield ( + "CPU", + "{dct[model name]} ({cache_size} cache)".format(**locals()), + ) yield "CPU Cores", cores def _summarize(self): @@ -136,11 +142,10 @@ class CpuProcFileData(ProcFileData): _parser = CpuinfoParser def _ioErrorOutputHandler(self): - yield 'CPU Cores', 'Not available' + yield "CPU Cores", "Not available" class MemoryStat(object): - def __init__(self, label, total_key, free_key): self.label = label self._total = [total_key, None] @@ -152,15 +157,17 @@ def set(self, key, value): stat[1] = convert_kb(value, key.endswith("Total")) def __repr__(self): - return ("{self._free[1]} of " - "{self._total[1]} available").format(**locals()) + return ("{self._free[1]} of " "{self._total[1]} available").format( + **locals() + ) class MeminfoParser(ProcFileParser): - def __init__(self): - self._stats = [MemoryStat("Memory", "MemTotal", "MemFree"), - MemoryStat("Swap", "SwapTotal", "SwapFree")] + self._stats = [ + MemoryStat("Memory", "MemTotal", "MemFree"), + MemoryStat("Swap", "SwapTotal", "SwapFree"), + ] def parse(self, line): key, value = self._parse_key_value(line) @@ -179,13 +186,14 @@ class MemProcFileData(ProcFileData): def _ioErrorOutputHandler(self): for stat in self._parser()._stats: - yield stat.label, 'Not available' + yield stat.label, "Not available" class CommandData(object): """ Base class for executing and return data based on executing a command """ + _args = [] _parser = None @@ -215,20 +223,27 @@ def _osErrorOutputHandler(self): return tuple() +@total_ordering class FilesystemInfo(object): - def __init__(self, mounted_on="", size=None, avail=None): self.mounted_on = mounted_on self.size = size self.avail = avail self.supporting = [] - def __cmp__(self, other_fs_info): - return cmp(self.mounted_on, other_fs_info.mounted_on) + def __eq__(self, other): + return self.mounted_on == other.mounted_on + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return self.mounted_on < other.mounted_on def __repr__(self): - repr_ = ("'{self.mounted_on}', " - "{self.avail} of {self.size} available").format(**locals()) + repr_ = ( + "'{self.mounted_on}', " "{self.avail} of {self.size} 
available" + ).format(**locals()) if self.supporting: supporting = ", ".join(self.supporting) repr_ = "{repr_} (supports {supporting})".format(**locals()) @@ -236,23 +251,26 @@ def __repr__(self): class DfParser(object): - def __init__(self): - self._zenoss_mounts = {zenhome.environ_key: "", - zendshome.environ_key: "", - rabbitmq_mnesia_base.environ_key: ""} + self._zenoss_mounts = { + zenhome.environ_key: "", + zendshome.environ_key: "", + rabbitmq_mnesia_base.environ_key: "", + } self._filesystems = [] def parse(self, line): if not line.startswith("/"): return filesystem, size, used, avail, use_pct, mounted_on = line.split() - fs_info = FilesystemInfo(mounted_on, convert_kb(size), convert_kb( - avail, False)) + fs_info = FilesystemInfo( + mounted_on, convert_kb(size), convert_kb(avail, False) + ) for environ_var in zenhome, zendshome, rabbitmq_mnesia_base: if environ_var.value is not None and environ_var.value.startswith( - mounted_on): + mounted_on + ): key = environ_var.environ_key if len(mounted_on) > len(self._zenoss_mounts[key]): fs_info.supporting.append(key) @@ -266,8 +284,8 @@ def output(self): yield "Filesystem", str(filesystem) +@implementer(IHostData) class DfData(CommandData): - implements(IHostData) _args = ["df", "-Pk"] _parser = DfParser @@ -276,9 +294,9 @@ def _osErrorOutputHandler(self): yield "Filesystem", "Not Available" +@implementer(IHostData) class HostId(CommandData): - implements(IHostData) - _args = ['hostid'] + _args = ["hostid"] def __init__(self): self._parser = HostId @@ -296,7 +314,6 @@ def _osErrorOutputHandler(self): class RpmParser(object): - def __init__(self, key): self._output = None self._key = key @@ -306,22 +323,19 @@ def parse(self, line): @property def output(self): - label = 'RPM' + label = "RPM" if self._key: label = "%s - %s" % (label, self._key) yield label, self._output class RPMData(CommandData): - def __init__(self, rpm_arg): super(RPMData, self).__init__() self._rpm_arg = rpm_arg - if ( - os.path.exists("/etc/redhat-release") - or - os.path.exists("/etc/SuSe-release") - ): + if os.path.exists("/etc/redhat-release") or os.path.exists( + "/etc/SuSe-release" + ): self._rpm_support = True self._args = ["rpm", "-q", rpm_arg] else: @@ -332,7 +346,7 @@ def _createParser(self): return RpmParser(self._rpm_arg) def _osErrorOutputHandler(self): - label = 'RPM' + label = "RPM" if self._rpm_arg: label = "%s - %s" % (label, self._rpm_arg) if self._rpm_support: @@ -342,33 +356,30 @@ def _osErrorOutputHandler(self): yield label, value +@implementer(IZenossEnvData) class ZenossRPMData(RPMData): - implements(IZenossEnvData) - def __init__(self): - super(ZenossRPMData, self).__init__('zenoss') + super(ZenossRPMData, self).__init__("zenoss") +@implementer(IZenossEnvData) class ZenDSRPMData(RPMData): - implements(IZenossEnvData) - def __init__(self): - super(ZenDSRPMData, self).__init__('zends') + super(ZenDSRPMData, self).__init__("zends") +@implementer(IZenossEnvData) class CoreZenpackRPMData(RPMData): - implements(IZenossEnvData) - def __init__(self): - super(CoreZenpackRPMData, self).__init__('zenoss-core-zenpacks') + super(CoreZenpackRPMData, self).__init__("zenoss-core-zenpacks") +@implementer(IZenossEnvData) class EnterpriseZenpackRPMData(RPMData): - implements(IZenossEnvData) - def __init__(self): super(EnterpriseZenpackRPMData, self).__init__( - 'zenoss-enterprise-zenpacks') + "zenoss-enterprise-zenpacks" + ) class Zenhome(object): @@ -437,29 +448,26 @@ def generate(self): rabbitmq_mnesia_base = RabbitmqMnesiaBase() +@implementer(IZenossEnvData) 
class ZenHomeData(object): - implements(IZenossEnvData) - def callHomeData(self): return zenhome.generate() +@implementer(IZenossEnvData) class ZenDSHomeData(object): - implements(IZenossEnvData) - def callHomeData(self): return zendshome.generate() +@implementer(IZenossEnvData) class RabbitData(object): - implements(IZenossEnvData) - def callHomeData(self): return rabbitmq_mnesia_base.generate() def convert_kb(kb_str, round_up=True): - units = ['YB', 'ZB', 'EB', 'PB', 'TB', 'GB', 'MB', 'KB'] + units = ["YB", "ZB", "EB", "PB", "TB", "GB", "MB", "KB"] quantity = int(kb_str.translate(None, string.ascii_letters)) # 5 percent fudge factor for rounding up while quantity > (1024 - (1024 * 0.05)): diff --git a/Products/ZenCallHome/VersionHistory.py b/Products/ZenCallHome/VersionHistory.py index 872edfc164..cc14379b94 100644 --- a/Products/ZenCallHome/VersionHistory.py +++ b/Products/ZenCallHome/VersionHistory.py @@ -7,24 +7,24 @@ # ############################################################################## - -from zope.interface import implements -from Products.ZenCallHome import IVersionHistoryCallHomeCollector -from Products.ZenCallHome.callhome import (REPORT_DATE_KEY, - VERSION_HISTORIES_KEY) import logging + +from zope.interface import implementer + +from . import IVersionHistoryCallHomeCollector +from .callhome import REPORT_DATE_KEY, VERSION_HISTORIES_KEY + log = logging.getLogger("zen.callhome") VERSION_START_KEY = "Version Start" +@implementer(IVersionHistoryCallHomeCollector) class VersionHistoryCallHomeCollector(object): """ - Superclass for version history collectors that - provides some basic functionality if you - provide the code to get the current version + Superclass for version history collectors that provides some basic + functionality if you provide the code to get the current version. """ - implements(IVersionHistoryCallHomeCollector) def __init__(self, versionedEntity): self._entity = versionedEntity @@ -39,15 +39,14 @@ def addVersionHistory(self, dmd, callHomeData): def getCurrentVersion(self, dmd, callHomeData): """ - implement this to determine the current - version. probably available in the - callhome data. + Implement this to determine the current version. + Probably available in the callhome data. """ raise NotImplementedError() def createVersionHistoryRecord(self, dmd, callHomeData): """ - Create a record object with the date + Create a record object with the date. """ reportDate = callHomeData[REPORT_DATE_KEY] record = {VERSION_START_KEY: reportDate} @@ -56,19 +55,20 @@ def createVersionHistoryRecord(self, dmd, callHomeData): class KeyedVersionHistoryCallHomeCollector(VersionHistoryCallHomeCollector): """ - If version info can be pulled from the callhome - data by simple keys, then this class handles - all the work. + If version info can be pulled from the callhome data by simple keys, + then this class handles all the work. 
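+
+    For example (keys here are hypothetical), a key map such as
+    {"app.version": "Version"} copies the value found at
+    callHomeData["app"]["version"] into each new history record under
+    the key "Version".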
""" def __init__(self, versionedEntity, historyRecordKeys=[]): - super(KeyedVersionHistoryCallHomeCollector, - self).__init__(versionedEntity) + super(KeyedVersionHistoryCallHomeCollector, self).__init__( + versionedEntity + ) self._historyRecordKeys = historyRecordKeys def createVersionHistoryRecord(self, dmd, callHomeData): - record = super(KeyedVersionHistoryCallHomeCollector, - self).createVersionHistoryRecord(dmd, callHomeData) + record = super( + KeyedVersionHistoryCallHomeCollector, self + ).createVersionHistoryRecord(dmd, callHomeData) if self._historyRecordKeys: for hrKey, targetKey in self._historyRecordKeys.iteritems(): value = self.getKeyedValue(hrKey, callHomeData) @@ -77,7 +77,7 @@ def createVersionHistoryRecord(self, dmd, callHomeData): return record def getKeyedValue(self, hrKey, callHomeData): - key_list = hrKey.split('.') + key_list = hrKey.split(".") currObj = callHomeData for key in key_list: currObj = currObj.get(key, None) @@ -87,16 +87,18 @@ def getKeyedValue(self, hrKey, callHomeData): class ZenossVersionHistoryCallHomeCollector( - KeyedVersionHistoryCallHomeCollector): - """ - """ + KeyedVersionHistoryCallHomeCollector +): + """ """ + ZENOSS_VERSION_HISTORY_KEY = "Zenoss" ZENOSS_VERSION_HISTORY_RECORD_KEYS = {} def __init__(self): super(ZenossVersionHistoryCallHomeCollector, self).__init__( self.ZENOSS_VERSION_HISTORY_KEY, - self.ZENOSS_VERSION_HISTORY_RECORD_KEYS) + self.ZENOSS_VERSION_HISTORY_RECORD_KEYS, + ) def getCurrentVersion(self, dmd, callHomeData): - return self.getKeyedValue('Zenoss App Data.Zenoss', callHomeData) + return self.getKeyedValue("Zenoss App Data.Zenoss", callHomeData) diff --git a/Products/ZenCallHome/ZenossAppData.py b/Products/ZenCallHome/ZenossAppData.py index a5fa333317..13d0cc3de5 100644 --- a/Products/ZenCallHome/ZenossAppData.py +++ b/Products/ZenCallHome/ZenossAppData.py @@ -7,52 +7,60 @@ # ############################################################################## - +import logging import time -from Products.ZenCallHome import (IZenossData, IDeviceResource, - IDeviceCpuCount, IDeviceType, - IVirtualDeviceType) -from zope.interface import implements -from zope.component import subscribers, getAdapters -from Products.Zuul import getFacade -from Products.ZenModel.DeviceComponent import DeviceComponent + from itertools import chain -import logging -from Products.Zuul.catalog.interfaces import IModelCatalogTool from zenoss.protocols.services.zep import ZepConnectionError -from . import IDeviceLink +from zope.interface import implementer +from zope.component import subscribers, getAdapters + +from Products.ZenModel.DeviceComponent import DeviceComponent +from Products.Zuul import getFacade +from Products.Zuul.catalog.interfaces import IModelCatalogTool + +from . 
import ( + IDeviceCpuCount, + IDeviceLink, + IDeviceResource, + IDeviceType, + IVirtualDeviceType, + IZenossData, +) log = logging.getLogger("zen.callhome") +@implementer(IZenossData) class ZenossAppData(object): - implements(IZenossData) - def callHomeData(self, dmd): self.dmd = dmd self._catalog = IModelCatalogTool(self.dmd) - stats = (self.server_key, - self.google_key, - self.all_versions, - self.event_classes, - self.event_count, - self.reports, - self.templates, - self.systems, - self.groups, - self.locations, - self.total_collectors, - self.zenpacks, - self.user_count, - self.product_count, - self.product_name, - self.components) + stats = ( + self.server_key, + self.google_key, + self.all_versions, + self.event_classes, + self.event_count, + self.reports, + self.templates, + self.systems, + self.groups, + self.locations, + self.total_collectors, + self.zenpacks, + self.user_count, + self.product_count, + self.product_name, + self.components, + ) return chain.from_iterable(map(lambda fn: fn(), stats)) def components(self): - brains = self._catalog.search(types=(DeviceComponent,), - facets_for_field=["meta_type"]) + brains = self._catalog.search( + types=(DeviceComponent,), facets_for_field=["meta_type"] + ) if brains.facets and brains.facets.get("meta_type"): facets = brains.facets["meta_type"] comps = facets.get_values() @@ -66,7 +74,8 @@ def product_name(self): def product_count(self): manufacturers = self.dmd.Manufacturers.objectValues( - spec='Manufacturer') + spec="Manufacturer" + ) prodCount = 0 for m in manufacturers: prodCount += m.products.countObjects() @@ -85,16 +94,21 @@ def google_key(self): def zenpacks(self): for zenpack in self.dmd.ZenPackManager.packs(): - yield ("Zenpack", - "{zenpack.id} {zenpack.version}".format(**locals())) + yield ( + "Zenpack", + "{zenpack.id} {zenpack.version}".format(**locals()), + ) def all_versions(self): - zenoss_version, cc_version = self.dmd.About.getZenossVersion(), self.dmd.About.getControlCenterVersion() + zenoss_version, cc_version = ( + self.dmd.About.getZenossVersion(), + self.dmd.About.getControlCenterVersion(), + ) yield zenoss_version.name, zenoss_version.full() yield cc_version.name, cc_version.full() def event_classes(self): - yield 'Evt Mappings', self.dmd.Events.countInstances() + yield "Evt Mappings", self.dmd.Events.countInstances() def reports(self): yield "Reports", self.dmd.Reports.countReports() @@ -116,21 +130,25 @@ def total_collectors(self): yield "Collectors", len(results) def event_count(self): - zep = getFacade('zep', self.dmd) + zep = getFacade("zep", self.dmd) try: - yield ("Event Count", - zep.countEventsSince(time.time() - 24 * 60 * 60)) + yield ( + "Event Count", + zep.countEventsSince(time.time() - 24 * 60 * 60), + ) except ZepConnectionError: yield "Event Count: last 24hr", "Not Available" -VM_MACS = {"00:0C:29": 'VMware Guest', - "00:50:56": 'VMware Guest', - "00:16:3e": 'Xen Guest'} +VM_MACS = { + "00:0C:29": "VMware Guest", + "00:50:56": "VMware Guest", + "00:16:3e": "Xen Guest", +} -class MacAddressVirtualDeviceType(object): - implements(IVirtualDeviceType) +@implementer(IVirtualDeviceType) +class MacAddressVirtualDeviceType(object): def __init__(self, device): self._device = device self._vmType = None @@ -143,9 +161,8 @@ def vmType(self): return self._vmType +@implementer(IDeviceType) class DeviceType(object): - implements(IDeviceType) - def __init__(self, device): self._device = device self._isVM = None @@ -163,7 +180,7 @@ def isVM(self): return self._isVM def type(self): - dType = 'Physical' + 
dType = "Physical" if self._isVM is None: self.isVM() if self._vmType: @@ -171,9 +188,8 @@ def type(self): return dType +@implementer(IDeviceResource) class DeviceTypeCounter(object): - implements(IDeviceResource) - def __init__(self, device): self._device = device @@ -192,22 +208,22 @@ def _get_type(self): return dev.type(), dev.isVM() +@implementer(IDeviceResource) class DeviceClassProductionStateCount(object): - implements(IDeviceResource) - def __init__(self, device): self._device = device def processDevice(self, stats): - key = "%s: %s" % (self._device.getDeviceClassPath(), - self._device.getProductionStateString()) + key = "%s: %s" % ( + self._device.getDeviceClassPath(), + self._device.getProductionStateString(), + ) stats.setdefault(key, 0) stats[key] += 1 +@implementer(IDeviceCpuCount) class DeviceCpuCounter(object): - implements(IDeviceCpuCount) - def __init__(self, device): self._device = device @@ -218,9 +234,8 @@ def cpuCount(self): return 0 +@implementer(IZenossData) class ZenossResourceData(object): - implements(IZenossData) - def __init__(self): self._dmd = None self._catalog = None @@ -233,24 +248,30 @@ def callHomeData(self, dmd): yield key, value def _process_devices(self): - stats = {'Device Count': 0, - 'Decommissioned Devices': 0, - 'CPU Cores': 0} + stats = { + "Device Count": 0, + "Decommissioned Devices": 0, + "CPU Cores": 0, + } LINKED_DEVICES = "Linked Devices" if LINKED_DEVICES not in stats: stats[LINKED_DEVICES] = 0 for device in self._dmd.Devices.getSubDevicesGen_recursive(): - stats['Device Count'] += 1 + stats["Device Count"] += 1 if device.getProductionState() < 0: stats["Decommissioned Devices"] += 1 cpuCount = IDeviceCpuCount(device).cpuCount() log.debug("Devices %s has %s cpu cores", device, cpuCount) - stats['CPU Cores'] += cpuCount + stats["CPU Cores"] += cpuCount for adapter in subscribers([device], IDeviceResource): adapter.processDevice(stats) found_linked = False for name, adapter in getAdapters((device,), IDeviceLink): - if adapter.linkedDevice() and adapter.linkedDevice().device().getProductionState() > 0: + if ( + adapter.linkedDevice() + and adapter.linkedDevice().device().getProductionState() + > 0 + ): key = "%s - %s" % (LINKED_DEVICES, name) if key not in stats: stats[key] = 0 diff --git a/Products/ZenCallHome/__init__.py b/Products/ZenCallHome/__init__.py index def0578128..460661ffc6 100644 --- a/Products/ZenCallHome/__init__.py +++ b/Products/ZenCallHome/__init__.py @@ -13,78 +13,75 @@ class ICallHomeCollector(Interface): """ - Implementers provide call home data + Implementers provide call home data. """ def generateData(self): """ - Generate data to be sent via call home - @return: dictionary of data to be sent. - values keyed by "_ERRORS_" should - be objects that will be attached - at the top level. + Generate data to be sent via call home. + + @return: Dictionary of data to be sent. Values keyed by "_ERRORS_" + should be objects that will be attached at the top level. @rtype: dict """ class IMasterCallHomeCollector(Interface): """ - Implementers provide call home data when collected on zenoss master + Implementers provide call home data when collected on zenoss master. """ def generateData(self, dmd): """ - Generate data to be sent via call home - @param dmd: databse connection - @return: dictionary of data to be sent - values keyed by "_ERRORS_" should - be objects that will be attached - at the top level. + Generate data to be sent via call home. 
+
+        @param dmd: Database connection
+        @return: Dictionary of data to be sent. Values keyed by "_ERRORS_"
+            should be objects that will be attached at the top level.
         @rtype: dict
         """


 class IVersionHistoryCallHomeCollector(Interface):
     """
-    Implementers provide version history records
+    Implementers provide version history records.
     """

     def addVersionHistory(self, dmd, callHomeData):
         """
-        Create records to be added to version history
-        @param the callhome data that will be modified
-        then sent
+        Create records to be added to version history.
+        @param the callhome data that will be modified then sent.
         """


 class IHostData(Interface):
     """
-    Used to gather Host machine statistics for call home
+    Used to gather Host machine statistics for call home.
     """

     def callHomeData(self):
         """
-        @return:: name, value pairs of host stats for call home
-        @rtype: list or generator of tuples
+        @return: name, value pairs of host stats for call home.
+        @rtype: list or generator of tuples.
         """


 class IZenossData(Interface):
     """
-    Used to gather Zenoss statistics for call home
+    Used to gather Zenoss statistics for call home.
     """

     def callHomeData(self, dmd):
         """
-        @param: dmd connection
-        @return: name, value pairs of Zenoss instance stats for call home
-        @rtype: list or generator of tuples
+        @param: dmd connection.
+        @return: name, value pairs of Zenoss instance stats for call home.
+        @rtype: list or generator of tuples.
         """


 class IZenossEnvData(Interface):
     """
-    Used to gather the Zenoss environment data for call home
+    Used to gather the Zenoss environment data for call home.
     """

     def callHomeData(self):
@@ -148,6 +145,7 @@ class IVirtualDeviceType(Interface):
     Subscription adapter. Determine the virtual machine type of
     a device if any. More than one impl can be registered per Device
     """
+
     def vmType(self):
         """
         @return the type of virtual machine or None if not a virtual
diff --git a/Products/ZenCallHome/callhome.py b/Products/ZenCallHome/callhome.py
index 6958eef461..91684c824a 100755
--- a/Products/ZenCallHome/callhome.py
+++ b/Products/ZenCallHome/callhome.py
@@ -7,23 +7,27 @@
 #
 ##############################################################################

+from __future__ import print_function
+
+import logging
 import json
+
 from datetime import datetime

-from zope.interface import implements
+from zope.interface import implementer
 from zope.component import getUtilitiesFor

-from Products.ZenCallHome.transport import CallHome
-
-from Products.ZenCallHome import (IZenossData, IHostData, IZenossEnvData,
-                                  ICallHomeCollector,
-                                  IMasterCallHomeCollector,
-                                  IVersionHistoryCallHomeCollector)
-from Products.ZenUtils.ZenScriptBase import ZenScriptBase
+from Products.ZenCallHome import (
+    ICallHomeCollector,
+    IHostData,
+    IMasterCallHomeCollector,
+    IVersionHistoryCallHomeCollector,
+    IZenossData,
+    IZenossEnvData,
+)
 from Products.ZenCallHome.CallHomeStatus import CallHomeStatus
+from Products.ZenUtils.ZenScriptBase import ZenScriptBase

-import logging
 log = logging.getLogger("zen.callhome")

 ERROR_KEY = "_ERROR_"
@@ -32,9 +36,7 @@
 VERSION_HISTORIES_KEY = "Version History"


-
 class CallHomeCollector(object):
-
     def __init__(self, utilityClass):
         self._utilityClass = utilityClass
         self._needsDmd = False
@@ -47,8 +49,12 @@ def generateData(self, dmd=None):
             args.append(dmd)
         for name, utilClass in getUtilitiesFor(self._utilityClass):
             try:
-                log.debug("Getting data from %s %s, args: %s",
-                          name, utilClass, str(args))
+                log.debug(
+                    "Getting data from %s %s, args: %s",
+                    name,
+                    utilClass,
+                    str(args),
+                )
                 util = utilClass()
                 for
key, val in util.callHomeData(*args): log.debug("Data: %s | %s", key, val) @@ -62,14 +68,18 @@ def generateData(self, dmd=None): stats[key] = val except Exception as e: errorObject = dict( - source=utilClass.__name__, - key=name, - callhome_collector=self.__class__.__name__, - exception=str(e)) - log.warn("Continuing after catching exception while " - "generating callhome data for collector " - "%(callhome_collector)s (%(source)s:%(key)s : " - "%(exception)s", errorObject) + source=utilClass.__name__, + key=name, + callhome_collector=self.__class__.__name__, + exception=str(e), + ) + log.warn( + "Continuing after catching exception while " + "generating callhome data for collector " + "%(callhome_collector)s (%(source)s:%(key)s : " + "%(exception)s", + errorObject, + ) errors.append(errorObject) returnValue = {self._key: stats} if errors: @@ -77,11 +87,11 @@ def generateData(self, dmd=None): return returnValue +@implementer(IMasterCallHomeCollector) class ZenossDataCallHomeCollector(CallHomeCollector): """ Gathers data from all IZenossData utilities registered """ - implements(IMasterCallHomeCollector) def __init__(self): super(ZenossDataCallHomeCollector, self).__init__(IZenossData) @@ -89,22 +99,22 @@ def __init__(self): self._needsDmd = True +@implementer(ICallHomeCollector) class HostDataCallHomeCollector(CallHomeCollector): """ Gathers data from all IHostData utilities registered """ - implements(ICallHomeCollector) def __init__(self): super(HostDataCallHomeCollector, self).__init__(IHostData) self._key = "Host Data" +@implementer(IMasterCallHomeCollector) class ZenossEnvDataCallHomeCollector(CallHomeCollector): """ Gathers data from all IZenossEnvData utilities registered """ - implements(IMasterCallHomeCollector) def __init__(self): super(ZenossEnvDataCallHomeCollector, self).__init__(IZenossEnvData) @@ -123,8 +133,9 @@ def getExistingVersionHistories(self): metricsString = self._dmd.callHome.metrics if metricsString and metricsString.strip(): metricsObj = json.loads(metricsString) - versionHistories = metricsObj.get(VERSION_HISTORIES_KEY, - {}) + versionHistories = metricsObj.get( + VERSION_HISTORIES_KEY, {} + ) except AttributeError: pass return {VERSION_HISTORIES_KEY: versionHistories} @@ -144,12 +155,15 @@ def getData(self): data.update(chData) except Exception as e: errorObject = dict( - callhome_collector=utilClass.__name__, - name=name, - exception=str(e)) - log.warn("Caught exception while generating callhome data " - "%(callhome_collector)s:%(name)s : %(exception)s", - errorObject) + callhome_collector=utilClass.__name__, + name=name, + exception=str(e), + ) + log.warn( + "Caught exception while generating callhome data " + "%(callhome_collector)s:%(name)s : %(exception)s", + errorObject, + ) errors.append(errorObject) if self._master: for name, utilClass in getUtilitiesFor(IMasterCallHomeCollector): @@ -162,26 +176,35 @@ def getData(self): data.update(chData) except Exception as e: errorObject = dict( - callhome_collector=utilClass.__name__, - name=name, - exception=str(e)) - log.warn("Caught exception while generating callhome " - "data %(callhome_collector)s:%(name)s : " - "%(exception)s", errorObject) + callhome_collector=utilClass.__name__, + name=name, + exception=str(e), + ) + log.warn( + "Caught exception while generating callhome " + "data %(callhome_collector)s:%(name)s : " + "%(exception)s", + errorObject, + ) errors.append(errorObject) if self._dmd: for name, utilClass in getUtilitiesFor( - IVersionHistoryCallHomeCollector): + IVersionHistoryCallHomeCollector + 
): try: utilClass().addVersionHistory(self._dmd, data) except Exception as e: errorObject = dict( - callhome_collector=utilClass.__name__, - name=name, - exception=str(e)) - log.warn("Caught exception while adding version " - "history: %(callhome_collector)s:%(name)s : " - "%(exception)s", errorObject) + callhome_collector=utilClass.__name__, + name=name, + exception=str(e), + ) + log.warn( + "Caught exception while adding version " + "history: %(callhome_collector)s:%(name)s : " + "%(exception)s", + errorObject, + ) errors.append(errorObject) if errors: data[EXTERNAL_ERROR_KEY] = errors @@ -189,13 +212,16 @@ def getData(self): class Main(ZenScriptBase): - def run(self): if self.options.status: chs = CallHomeStatus() - print 'Status:\t Description:\t Error:\t' + print("Status:\t Description:\t Error:\t") for i in chs.status(): - print '{0}\t {1}\t {2}'.format(i.get('status'), i.get('description'), i.get('error')) + print( + "{0}\t {1}\t {2}".format( + i.get("status"), i.get("description"), i.get("error") + ) + ) return if self.options.master: @@ -211,40 +237,55 @@ def run(self): data = chd.getData() if self.options.pretty: from pprint import pprint + pprint(data) else: sort = False if self.options.jsonIndent: sort = True - print(json.dumps(data, indent=self.options.jsonIndent, - sort_keys=sort)) + print( + json.dumps( + data, indent=self.options.jsonIndent, sort_keys=sort + ) + ) chs.stage(chs.COLLECT_CALLHOME, "FINISHED") def buildOptions(self): """basic options setup sub classes can add more options here""" ZenScriptBase.buildOptions(self) - self.parser.add_option('-M', '--master', - dest='master', - default=False, - action='store_true', - help='Gather zenoss master data') - self.parser.add_option('-p', - dest='pretty', - default=False, - action='store_true', - help='pretty print the output') - self.parser.add_option('-i', '--json_indent', - dest='jsonIndent', - help='indent setting for json output', - default=None, - type='int') - self.parser.add_option('-s', '--status', - action='store_true', - dest='status', - help='show detail status information', - default=False) - - -if __name__ == '__main__': + self.parser.add_option( + "-M", + "--master", + dest="master", + default=False, + action="store_true", + help="Gather zenoss master data", + ) + self.parser.add_option( + "-p", + dest="pretty", + default=False, + action="store_true", + help="pretty print the output", + ) + self.parser.add_option( + "-i", + "--json_indent", + dest="jsonIndent", + help="indent setting for json output", + default=None, + type="int", + ) + self.parser.add_option( + "-s", + "--status", + action="store_true", + dest="status", + help="show detail status information", + default=False, + ) + + +if __name__ == "__main__": main = Main(connect=False) main.run() diff --git a/Products/ZenCallHome/tests/testCallHomeGeneration.py b/Products/ZenCallHome/tests/testCallHomeGeneration.py index 40e4813e73..f0facae9e8 100644 --- a/Products/ZenCallHome/tests/testCallHomeGeneration.py +++ b/Products/ZenCallHome/tests/testCallHomeGeneration.py @@ -7,33 +7,37 @@ # ############################################################################## - -import time import json +import time from datetime import datetime - -from zope.interface import Interface, implements +from zope.interface import Interface, implementer +from Zope2.App import zcml from Products.ZenTestCase.BaseTestCase import BaseTestCase -from Zope2.App import zcml import Products.ZenCallHome + from Products.ZenCallHome import ICallHomeCollector -from 
Products.ZenCallHome.callhome import (CallHomeCollector, CallHomeData, - EXTERNAL_ERROR_KEY, - REPORT_DATE_KEY, - VERSION_HISTORIES_KEY) +from Products.ZenCallHome.callhome import ( + CallHomeCollector, + CallHomeData, + EXTERNAL_ERROR_KEY, + REPORT_DATE_KEY, + VERSION_HISTORIES_KEY, +) from Products.ZenCallHome.VersionHistory import ( - VERSION_START_KEY, - KeyedVersionHistoryCallHomeCollector) + KeyedVersionHistoryCallHomeCollector, + VERSION_START_KEY, +) from Products.ZenCallHome.transport import ( - CallHome, - CallHomeData as PersistentCallHomeData) + CallHome, + CallHomeData as PersistentCallHomeData, +) -DATETIME_ISOFORMAT = '%Y-%m-%dT%H:%M:%S.%f' +DATETIME_ISOFORMAT = "%Y-%m-%dT%H:%M:%S.%f" TEST_DATA = """ -""" # noqa E501 +""" # noqa E501 FAILING_TEST_DATA = """ -""" # noqa E501 +""" # noqa E501 SIMPLE_SUCCESS_COLLECTOR = """ -""" # noqa E501 +""" # noqa E501 SIMPLE_SUCCESS_KEY = "simplesuccess" @@ -71,7 +75,7 @@ provides="Products.ZenCallHome.ICallHomeCollector" name="fastfail"/> -""" # noqa E501 +""" # noqa E501 FAST_FAIL_KEY = "fastfail" @@ -85,20 +89,18 @@ provides="Products.ZenCallHome.IVersionHistoryCallHomeCollector" name="testversionhistory"/> -""" # noqa E501 +""" # noqa E501 class ITestCallHomeData(Interface): - """ - """ + """ """ + def callHomeData(self): - """ - """ + """ """ +@implementer(ITestCallHomeData) class TestCallHomeData(object): - implements(ITestCallHomeData) - def callHomeData(self): yield "test", "test" @@ -107,18 +109,15 @@ class FailingTestDataException(Exception): pass +@implementer(ITestCallHomeData) class FailingTestCallHomeData(object): - implements(ITestCallHomeData) - def callHomeData(self): raise FailingTestDataException(FAILING_DATA_ERROR_MESSAGE) +@implementer(ICallHomeCollector) class SimpleSuccessCollector(CallHomeCollector): - """ - Default success collector as a control variable - """ - implements(ICallHomeCollector) + """Default success collector as a control variable.""" def __init__(self): super(SimpleSuccessCollector, self).__init__(ITestCallHomeData) @@ -129,11 +128,9 @@ class FastFailTestException(Exception): pass +@implementer(ICallHomeCollector) class FastFailCollector(CallHomeCollector): - """ - Default success collector as a control variable - """ - implements(ICallHomeCollector) + """Default success collector as a control variable.""" def __init__(self): super(FastFailCollector, self).__init__(ITestCallHomeData) @@ -142,6 +139,7 @@ def __init__(self): def generateData(self): raise FastFailTestException(FAST_FAIL_ERROR_MESSAGE) + TEST_VERSION_HISTORY_ENTITY = "testentity" TEST_VERSION_1 = "testversion1" TEST_VERSION_2 = "testversion2" @@ -153,22 +151,22 @@ def returnHistory(): class TestVersionHistoryCollector(KeyedVersionHistoryCallHomeCollector): - """ - """ + """ """ + def __init__(self): super(TestVersionHistoryCollector, self).__init__( - TEST_VERSION_HISTORY_ENTITY, {}) + TEST_VERSION_HISTORY_ENTITY, {} + ) def getCurrentVersion(self, dmd, callHomeData): return returnHistory() class testCallHomeGeneration(BaseTestCase): - def afterSetUp(self): super(testCallHomeGeneration, self).afterSetUp() - zcml.load_config('meta.zcml', Products.ZenCallHome) - zcml.load_config('configure.zcml', Products.ZenCallHome) + zcml.load_config("meta.zcml", Products.ZenCallHome) + zcml.load_config("configure.zcml", Products.ZenCallHome) def beforeTearDown(self): super(testCallHomeGeneration, self).beforeTearDown() @@ -199,8 +197,9 @@ def testCallHomeCollectorFailure(self): self.assertTrue(FAST_FAIL_KEY not in data) self.assertTrue("Zenoss App 
Data" in data) self.assertTrue(EXTERNAL_ERROR_KEY in data) - self.assertEquals(FAST_FAIL_ERROR_MESSAGE, - data[EXTERNAL_ERROR_KEY][0]['exception']) + self.assertEquals( + FAST_FAIL_ERROR_MESSAGE, data[EXTERNAL_ERROR_KEY][0]["exception"] + ) def testConstituentDataFailure(self): # check current version of report (should be empty?) @@ -224,8 +223,10 @@ def testConstituentDataFailure(self): successData = data[SIMPLE_SUCCESS_KEY] self.assertTrue("test" in successData) self.assertTrue(EXTERNAL_ERROR_KEY in data) - self.assertEquals(FAILING_DATA_ERROR_MESSAGE, - data[EXTERNAL_ERROR_KEY][0]['exception']) + self.assertEquals( + FAILING_DATA_ERROR_MESSAGE, + data[EXTERNAL_ERROR_KEY][0]["exception"], + ) def testPayloadGeneration(self): # check current version of report (should be empty) @@ -262,24 +263,28 @@ def testPayloadGeneration(self): payloadObj = json.loads(payload) # make sure payload has the required fields - self.assertTrue('product' in payloadObj) - self.assertTrue('uuid' in payloadObj) - self.assertTrue('symkey' in payloadObj) - self.assertTrue('metrics' in payloadObj) + self.assertTrue("product" in payloadObj) + self.assertTrue("uuid" in payloadObj) + self.assertTrue("symkey" in payloadObj) + self.assertTrue("metrics" in payloadObj) # reconstitute metrics obj & make sure send date is present # and has a valid time - metricsObj = json.loads(payloadObj['metrics']) - self.assertTrue('Send Date' in metricsObj) - sendDateDT = datetime.strptime(metricsObj['Send Date'], - DATETIME_ISOFORMAT) - reportDateDT = datetime.strptime(metricsObj['Report Date'], - DATETIME_ISOFORMAT) + metricsObj = json.loads(payloadObj["metrics"]) + self.assertTrue("Send Date" in metricsObj) + sendDateDT = datetime.strptime( + metricsObj["Send Date"], DATETIME_ISOFORMAT + ) + reportDateDT = datetime.strptime( + metricsObj["Report Date"], DATETIME_ISOFORMAT + ) self.assertTrue(reportDateDT < sendDateDT) - self.assertTrue(beforeReportGeneration <= reportDateDT - <= afterReportGeneration) - self.assertTrue(beforePayloadGeneration <= sendDateDT - <= afterPayloadGeneration) + self.assertTrue( + beforeReportGeneration <= reportDateDT <= afterReportGeneration + ) + self.assertTrue( + beforePayloadGeneration <= sendDateDT <= afterPayloadGeneration + ) def testZenossVersionHistory(self): # check current version of report (should be empty?) 
@@ -289,13 +294,13 @@ def testZenossVersionHistory(self): chd = CallHomeData(self.dmd, True) data = chd.getData() reportDate = data[REPORT_DATE_KEY] - zenossVersion = data['Zenoss App Data']['Zenoss'] + zenossVersion = data["Zenoss App Data"]["Zenoss"] # make sure report has Zenoss version history record self.assertTrue(VERSION_HISTORIES_KEY in data) versionHistories = data[VERSION_HISTORIES_KEY] - self.assertTrue('Zenoss' in versionHistories) - versionHistory = versionHistories['Zenoss'] + self.assertTrue("Zenoss" in versionHistories) + versionHistory = versionHistories["Zenoss"] self.assertTrue(zenossVersion in versionHistory) historyRecord = versionHistory[zenossVersion] self.assertTrue(VERSION_START_KEY in historyRecord) @@ -384,23 +389,24 @@ def testSendMethod(self): # reconstitute metrics obj & make sure send date is present # and has a valid time - metricsObj = json.loads(payloadObj['metrics']) - self.assertTrue('Send Method' in metricsObj) - self.assertEquals('directpost', metricsObj['Send Method']) + metricsObj = json.loads(payloadObj["metrics"]) + self.assertTrue("Send Method" in metricsObj) + self.assertEquals("directpost", metricsObj["Send Method"]) # Fetch the payload the browserjs way payloadGenerator = CallHome(self.dmd) - payload = payloadGenerator.get_payload(method='browserjs', - doEncrypt=False) + payload = payloadGenerator.get_payload( + method="browserjs", doEncrypt=False + ) # reconstitute object payloadObj = json.loads(payload) # reconstitute metrics obj & make sure send date is present # and has a valid time - metricsObj = json.loads(payloadObj['metrics']) - self.assertTrue('Send Method' in metricsObj) - self.assertEquals('browserjs', metricsObj['Send Method']) + metricsObj = json.loads(payloadObj["metrics"]) + self.assertTrue("Send Method" in metricsObj) + self.assertEquals("browserjs", metricsObj["Send Method"]) def testGenerateReportWithEmptyMetricsField(self): # Make sure that an empty metrics field @@ -412,21 +418,21 @@ def testGenerateReportWithEmptyMetricsField(self): # call callhome scripting chd = CallHomeData(self.dmd, True) - data = chd.getData() # noqa F841 + data = chd.getData() # noqa F841 # Then handle empty string value self.dmd.callHome.metrics = "" # call callhome scripting chd = CallHomeData(self.dmd, True) - data = chd.getData() # noqa F841 + data = chd.getData() # noqa F841 # Then handle whitespace-only string value self.dmd.callHome.metrics = " " # call callhome scripting chd = CallHomeData(self.dmd, True) - data = chd.getData() # noqa F841 + data = chd.getData() # noqa F841 # # UNFORTUNATELY CANNOT EASILY UNIT TEST TIMEOUTS BECAUSE @@ -438,6 +444,7 @@ def testGenerateReportWithEmptyMetricsField(self): def test_suite(): from unittest import TestSuite, makeSuite + suite = TestSuite() suite.addTest(makeSuite(testCallHomeGeneration)) return suite diff --git a/Products/ZenCallHome/tests/testVersionHistory.py b/Products/ZenCallHome/tests/testVersionHistory.py index ff4cdbdcfe..084344abb7 100644 --- a/Products/ZenCallHome/tests/testVersionHistory.py +++ b/Products/ZenCallHome/tests/testVersionHistory.py @@ -7,16 +7,16 @@ # ############################################################################## - from datetime import datetime, timedelta - from Products.ZenTestCase.BaseTestCase import BaseTestCase + from Products.ZenCallHome.callhome import REPORT_DATE_KEY from Products.ZenCallHome.VersionHistory import ( - VERSION_START_KEY, - VERSION_HISTORIES_KEY, - KeyedVersionHistoryCallHomeCollector) + VERSION_START_KEY, + VERSION_HISTORIES_KEY, + 
KeyedVersionHistoryCallHomeCollector, +) TEST_ENTITY = "testentity" @@ -34,8 +34,8 @@ TEST_VERSION_VALUE_1 = "versionstring_1" TEST_VERSION_VALUE_2 = "versionstring_2" REPORT_DATE_VALUE_1 = datetime.utcnow() -REPORT_DATE_VALUE_2 = (REPORT_DATE_VALUE_1 + timedelta(days=1)) -REPORT_DATE_VALUE_3 = (REPORT_DATE_VALUE_2 + timedelta(days=1)) +REPORT_DATE_VALUE_2 = REPORT_DATE_VALUE_1 + timedelta(days=1) +REPORT_DATE_VALUE_3 = REPORT_DATE_VALUE_2 + timedelta(days=1) REPORT_DATE_VALUE_1 = REPORT_DATE_VALUE_1.isoformat() REPORT_DATE_VALUE_2 = REPORT_DATE_VALUE_2.isoformat() REPORT_DATE_VALUE_3 = REPORT_DATE_VALUE_3.isoformat() @@ -46,30 +46,31 @@ def createTestCallHomeData(): "histprop1": "testvalue1", "app": { "histprop2": "testvalue2", - "testversion": TEST_VERSION_VALUE_1 - }, - REPORT_DATE_KEY: REPORT_DATE_VALUE_1 - } + "testversion": TEST_VERSION_VALUE_1, + }, + REPORT_DATE_KEY: REPORT_DATE_VALUE_1, + } + TEST_KEY_MAP = { HISTPROP1_KEY: PROP1_TARGET_KEY, - HISTPROP2_KEY: PROP2_TARGET_KEY + HISTPROP2_KEY: PROP2_TARGET_KEY, } class TestVersionHistoryCollector(KeyedVersionHistoryCallHomeCollector): - """ - """ + """ """ + def __init__(self): - super(TestVersionHistoryCollector, self).__init__(TEST_ENTITY, - TEST_KEY_MAP) + super(TestVersionHistoryCollector, self).__init__( + TEST_ENTITY, TEST_KEY_MAP + ) def getCurrentVersion(self, dmd, callHomeData): return self.getKeyedValue(TEST_VERSION_KEY, callHomeData) class testVersionHistory(BaseTestCase): - def afterSetUp(self): super(testVersionHistory, self).afterSetUp() # zcml.load_config('meta.zcml', Products.ZenCallHome) @@ -101,8 +102,9 @@ def testVersionHistory(self): self.assertTrue(TEST_VERSION_VALUE_1 in versionHistory) historyRecord = versionHistory[TEST_VERSION_VALUE_1] self.assertTrue(VERSION_START_KEY in historyRecord) - self.assertEquals(REPORT_DATE_VALUE_1, - historyRecord[VERSION_START_KEY]) + self.assertEquals( + REPORT_DATE_VALUE_1, historyRecord[VERSION_START_KEY] + ) self.assertTrue(PROP1_TARGET_KEY in historyRecord) self.assertEquals(HISTPROP1_VALUE, historyRecord[PROP1_TARGET_KEY]) self.assertTrue(PROP2_TARGET_KEY in historyRecord) @@ -128,8 +130,9 @@ def testVersionHistory(self): self.assertTrue(TEST_VERSION_VALUE_1 in versionHistory) historyRecord = versionHistory[TEST_VERSION_VALUE_1] self.assertTrue(VERSION_START_KEY in historyRecord) - self.assertEquals(REPORT_DATE_VALUE_1, - historyRecord[VERSION_START_KEY]) + self.assertEquals( + REPORT_DATE_VALUE_1, historyRecord[VERSION_START_KEY] + ) self.assertTrue(PROP1_TARGET_KEY in historyRecord) self.assertEquals(HISTPROP1_VALUE, historyRecord[PROP1_TARGET_KEY]) self.assertTrue(PROP2_TARGET_KEY in historyRecord) @@ -137,7 +140,7 @@ def testVersionHistory(self): # Update the version and report date. 
testCallHomeData[REPORT_DATE_KEY] = REPORT_DATE_VALUE_3 - testCallHomeData['app']['testversion'] = TEST_VERSION_VALUE_2 + testCallHomeData["app"]["testversion"] = TEST_VERSION_VALUE_2 # Update the version history collector.addVersionHistory(self.dmd, testCallHomeData) @@ -153,17 +156,20 @@ def testVersionHistory(self): self.assertTrue(TEST_VERSION_VALUE_2 in versionHistory) historyRecord = versionHistory[TEST_VERSION_VALUE_2] self.assertTrue(VERSION_START_KEY in historyRecord) - self.assertEquals(REPORT_DATE_VALUE_3, - historyRecord[VERSION_START_KEY]) + self.assertEquals( + REPORT_DATE_VALUE_3, historyRecord[VERSION_START_KEY] + ) self.assertTrue(PROP1_TARGET_KEY in historyRecord) - self.assertEquals(HISTPROP1_SECONDVALUE, - historyRecord[PROP1_TARGET_KEY]) + self.assertEquals( + HISTPROP1_SECONDVALUE, historyRecord[PROP1_TARGET_KEY] + ) self.assertTrue(PROP2_TARGET_KEY in historyRecord) self.assertEquals(HISTPROP2_VALUE, historyRecord[PROP2_TARGET_KEY]) def test_suite(): from unittest import TestSuite, makeSuite + suite = TestSuite() suite.addTest(makeSuite(testVersionHistory)) return suite diff --git a/Products/ZenCallHome/transport/__init__.py b/Products/ZenCallHome/transport/__init__.py index ce13baf8ca..7ccdbf1010 100644 --- a/Products/ZenCallHome/transport/__init__.py +++ b/Products/ZenCallHome/transport/__init__.py @@ -7,7 +7,6 @@ # ############################################################################## - import base64 import json import logging @@ -15,36 +14,38 @@ import string import time import zlib + from datetime import datetime from persistent.dict import PersistentDict +from Persistence import Persistent +from zenoss.protocols.services.zep import ZepConnectionError from zope.component import getUtilitiesFor -from Persistence import Persistent -from Products.ZenCallHome.transport.crypt import encrypt, decrypt -from Products.ZenCallHome.transport.interfaces import IReturnPayloadProcessor from Products.ZenUtils.Version import Version from Products.Zuul import getFacade -from zenoss.protocols.services.zep import ZepConnectionError -from Products.ZenCallHome.CallHomeStatus import CallHomeStatus -__doc__ = ("Callhome mechanism. Reports anonymous statistics " + - "back to Zenoss, Inc.") +from ..CallHomeStatus import CallHomeStatus +from .crypt import encrypt, decrypt +from .interfaces import IReturnPayloadProcessor + +__doc__ = ( + "Callhome mechanism. Reports anonymous statistics back to Zenoss, Inc." +) # number of seconds between successful checkins -CHECKIN_WAIT = 60*60*24 +CHECKIN_WAIT = 60 * 60 * 24 # number of seconds between checkin attempts (per method) -CHECKIN_ATTEMPT_WAIT = 60*60*2 +CHECKIN_ATTEMPT_WAIT = 60 * 60 * 2 -logger = logging.getLogger('zen.callhome') +logger = logging.getLogger("zen.callhome") def is_callhome_disabled(dmd): - return not getattr(dmd, 'versionCheckOptIn', True) + return not getattr(dmd, "versionCheckOptIn", True) class CallHome(object): - def __init__(self, dmd): self.dmd = dmd try: @@ -55,58 +56,54 @@ def __init__(self, dmd): self.chs = CallHomeStatus() def attempt(self, method): - ''' + """ Decide whether or not to attempt a callhome. This is computed from the time elapsed from last successful callhome, or time elapsed from the last attempt via the method passed in with the 'method' param. 
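+
+        A callhome is attempted when CHECKIN_ATTEMPT_WAIT seconds have
+        passed since the last attempt for this method and CHECKIN_WAIT
+        seconds have passed since the last success (or a callhome has
+        been explicitly requested), and only if metrics are available
+        and are not currently being regathered.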
- ''' - if (is_callhome_disabled(self.dmd) and - not self.callHome.requestCallhome): + """ + if ( + is_callhome_disabled(self.dmd) + and not self.callHome.requestCallhome + ): return False - now = long(time.time()) + now = int(time.time()) # If we have waited long enough between checkings or attempts (or one # has been requested), and we have metrics to send and are not # currently updating them, then attempt a callhome if ( - ( - now - self.callHome.lastAttempt[method] > CHECKIN_ATTEMPT_WAIT - and - now - self.callHome.lastSuccess > CHECKIN_WAIT - ) - or - self.callHome.requestCallhome - ): + now - self.callHome.lastAttempt[method] > CHECKIN_ATTEMPT_WAIT + and now - self.callHome.lastSuccess > CHECKIN_WAIT + ) or self.callHome.requestCallhome: if ( self.callHome.metrics - and - not self.callHome.requestMetricsGather - ): + and not self.callHome.requestMetricsGather + ): self.callHome.lastAttempt[method] = now self.callHome.requestCallhome = False return True return False - def get_payload(self, method='directpost', doEncrypt=True): - ''' + def get_payload(self, method="directpost", doEncrypt=True): + """ Retrieve the current callhome payload to send. This is the call that occurs at send/request time (as opposed to the time that the report was generated). - ''' + """ payload = {} # product info - payload['product'] = self.dmd.getProductName() - payload['uuid'] = self.dmd.uuid or "NOT ACTIVATED" - payload['symkey'] = self.callHome.symmetricKey + payload["product"] = self.dmd.getProductName() + payload["uuid"] = self.dmd.uuid or "NOT ACTIVATED" + payload["symkey"] = self.callHome.symmetricKey metrics = self.callHome.metrics metricsObj = json.loads(metrics) - metricsObj['Send Date'] = datetime.utcnow().isoformat() - metricsObj['Send Method'] = method + metricsObj["Send Date"] = datetime.utcnow().isoformat() + metricsObj["Send Method"] = method - payload['metrics'] = json.dumps(metricsObj) + payload["metrics"] = json.dumps(metricsObj) payloadString = json.dumps(payload) if doEncrypt: @@ -115,62 +112,73 @@ def get_payload(self, method='directpost', doEncrypt=True): return payloadString def save_return_payload(self, returnPayload): - ''' + """ Process and save the data returned from the callhome server. This always includes versioning and crypto key changes, and may include other data to be processed by plugins to the IReturnPayloadProcessor interface. 
- ''' + """ try: - returnPayload = zlib.decompress(base64.urlsafe_b64decode( - returnPayload)) + returnPayload = zlib.decompress( + base64.urlsafe_b64decode(returnPayload) + ) returnPayload = json.loads(returnPayload) except Exception: - logger.debug('Error decoding return payload from server') + logger.debug("Error decoding return payload from server") return - if all(x in returnPayload for x in ('currentPublicKey', - 'revocationList')): + if all( + x in returnPayload for x in ("currentPublicKey", "revocationList") + ): # TODO: VERIFY revocation list, and apply - newPubkey = returnPayload.get('currentPublicKey') + newPubkey = returnPayload.get("currentPublicKey") if self.callHome.publicKey != newPubkey: self.callHome.publicKey = newPubkey - if 'encrypted' in returnPayload: - base64data = base64.urlsafe_b64decode(str(returnPayload.get( - 'encrypted'))) + if "encrypted" in returnPayload: + base64data = base64.urlsafe_b64decode( + str(returnPayload.get("encrypted")) + ) data = json.loads(decrypt(base64data, self.callHome.symmetricKey)) - if 'compliancereport' in data: - data['compliancereport']['pdf'] = base64.urlsafe_b64decode(str( - data['compliancereport']['pdf'])) + if "compliancereport" in data: + data["compliancereport"]["pdf"] = base64.urlsafe_b64decode( + str(data["compliancereport"]["pdf"]) + ) - if 'latestVersion' in data: + if "latestVersion" in data: # Save the latest version, and send a # message if new version available - self.dmd.lastVersionCheck = long(time.time()) - available = Version.parse('Zenoss ' + data['latestVersion']) - if (getattr(self.dmd, 'availableVersion', '') - != available.short()): + self.dmd.lastVersionCheck = int(time.time()) + available = Version.parse("Zenoss " + data["latestVersion"]) + if ( + getattr(self.dmd, "availableVersion", "") + != available.short() + ): self.dmd.availableVersion = available.short() if self.dmd.About.getZenossVersion() < available: try: import socket - zep = getFacade('zep') - summary = ('A new version of Zenoss (%s)' + - 'has been released') % available.short() - zep.create(summary, 'Info', socket.getfqdn()) + + zep = getFacade("zep") + summary = ( + "A new version of Zenoss (%s)" + + "has been released" + ) % available.short() + zep.create(summary, "Info", socket.getfqdn()) except ZepConnectionError: - logger.warning("ZEP not running - can't send " + - "new version event") + logger.warning( + "ZEP not running - can't send " + + "new version event" + ) # Go through other data in the return payload, and process for name, utility in getUtilitiesFor(IReturnPayloadProcessor): if name in data: utility.process(self.dmd, data[name]) - self.callHome.lastSuccess = long(time.time()) - self.chs.updateStat('lastSuccess', long(time.time())) + self.callHome.lastSuccess = int(time.time()) + self.chs.updateStat("lastSuccess", int(time.time())) return @@ -180,13 +188,15 @@ def __init__(self): self.requestCallhome = False self.lastAttempt = PersistentDict() - self.lastAttempt['browserjs'] = 0 - self.lastAttempt['directpost'] = 0 - - self.publicKey = 'EC7EFA98' - keyParts = ((random.choice(string.ascii_letters + string.digits) - for x in range(64))) - self.symmetricKey = ''.join(keyParts) + self.lastAttempt["browserjs"] = 0 + self.lastAttempt["directpost"] = 0 + + self.publicKey = "EC7EFA98" + keyParts = ( + random.choice(string.ascii_letters + string.digits) + for x in range(64) + ) + self.symmetricKey = "".join(keyParts) self.metrics = None self.lastMetricsGather = 0 diff --git a/Products/ZenCallHome/transport/crypt/__init__.py 
b/Products/ZenCallHome/transport/crypt/__init__.py index 70388d7123..02ccf91e38 100644 --- a/Products/ZenCallHome/transport/crypt/__init__.py +++ b/Products/ZenCallHome/transport/crypt/__init__.py @@ -7,49 +7,61 @@ # ############################################################################## - import logging import os import subprocess from Products.ZenUtils.Utils import zenPath -logger = logging.getLogger('zen.callhome') +logger = logging.getLogger("zen.callhome") -CRYPTPATH = zenPath('Products', 'ZenCallHome', 'transport', 'crypt') -GPGCMD = 'gpg --batch --no-tty --quiet --no-auto-check-trustdb ' +CRYPTPATH = zenPath("Products", "ZenCallHome", "transport", "crypt") +GPGCMD = "gpg --batch --no-tty --quiet --no-auto-check-trustdb " def _getEnv(): env = os.environ.copy() - env.pop('GPG_AGENT_INFO', None) + env.pop("GPG_AGENT_INFO", None) return env def encrypt(stringToEncrypt, publicKey): - cmd = (GPGCMD + '--keyring %s --trustdb-name %s -e -r %s' % - (CRYPTPATH + '/pubring.gpg', - CRYPTPATH + '/trustdb.gpg', - publicKey)) + cmd = GPGCMD + "--keyring %s --trustdb-name %s -e -r %s" % ( + CRYPTPATH + "/pubring.gpg", + CRYPTPATH + "/trustdb.gpg", + publicKey, + ) - p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=_getEnv(), - stdout=subprocess.PIPE, stderr=open(os.devnull)) + p = subprocess.Popen( + cmd, + shell=True, + stdin=subprocess.PIPE, + env=_getEnv(), + stdout=subprocess.PIPE, + stderr=open(os.devnull), + ) out = p.communicate(input=stringToEncrypt)[0] if p.returncode != 0: - logger.warn('Unable to encrypt payload -- is GPG installed?') + logger.warn("Unable to encrypt payload -- is GPG installed?") return None return out def decrypt(stringToDecrypt, symKey): - cmd = GPGCMD + '--passphrase %s -d' % symKey + cmd = GPGCMD + "--passphrase %s -d" % symKey - p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=_getEnv(), - stdout=subprocess.PIPE, stderr=open(os.devnull)) + p = subprocess.Popen( + cmd, + shell=True, + stdin=subprocess.PIPE, + env=_getEnv(), + stdout=subprocess.PIPE, + stderr=open(os.devnull), + ) out = p.communicate(input=stringToDecrypt)[0] if p.returncode != 0: - logger.warn('Unable to decrypt payload -- is GPG installed?') + logger.warn("Unable to decrypt payload -- is GPG installed?") return None return out diff --git a/Products/ZenCallHome/transport/cycler.py b/Products/ZenCallHome/transport/cycler.py index f6d8bf335a..f4518435dc 100644 --- a/Products/ZenCallHome/transport/cycler.py +++ b/Products/ZenCallHome/transport/cycler.py @@ -7,32 +7,33 @@ # ############################################################################## - import logging import os import time import transaction + +from twisted.internet import reactor from twisted.internet.protocol import ProcessProtocol from twisted.internet.task import LoopingCall -from twisted.internet import reactor -from Products.ZenCallHome.transport import CallHome -from Products.ZenCallHome.transport.methods.directpost import direct_post from Products.ZenUtils.Utils import zenPath from Products.Zuul.utils import safe_hasattr -from Products.ZenCallHome.CallHomeStatus import CallHomeStatus + +from ..CallHomeStatus import CallHomeStatus +from . 
import CallHome +from .methods.directpost import direct_post # number of seconds between metrics updates -GATHER_METRICS_INTERVAL = 60*60*24*30 +GATHER_METRICS_INTERVAL = 60 * 60 * 24 * 30 -logger = logging.getLogger('zen.callhome') +logger = logging.getLogger("zen.callhome") class CallHomeCycler(object): def __init__(self, dmd): self.dmd = dmd - if not safe_hasattr(dmd, 'callHome') or dmd.callHome is None: + if not safe_hasattr(dmd, "callHome") or dmd.callHome is None: dmd._p_jar.sync() CallHome(dmd).callHome transaction.commit() @@ -46,34 +47,33 @@ def run(self): chs = CallHomeStatus() chs.stage(chs.START_CALLHOME) try: - now = long(time.time()) + now = int(time.time()) self.dmd._p_jar.sync() # Start metrics gather if needed if ( - ( - now - self.callhome.lastMetricsGather > - GATHER_METRICS_INTERVAL - or - self.callhome.requestMetricsGather - ) - and not - self.gatherProtocol - ): + now - self.callhome.lastMetricsGather > GATHER_METRICS_INTERVAL + or self.callhome.requestMetricsGather + ) and not self.gatherProtocol: self.gatherProtocol = GatherMetricsProtocol() self.callhome.requestMetricsGather = True # Update metrics if run complete - if self.gatherProtocol and (self.gatherProtocol.data or - self.gatherProtocol.failed): + if self.gatherProtocol and ( + self.gatherProtocol.data or self.gatherProtocol.failed + ): chs.stage(chs.GPROTOCOL) if not self.gatherProtocol.failed: self.callhome.metrics = self.gatherProtocol.data try: chs.stage(chs.GPROTOCOL, "FINISHED") chs.stage(chs.UPDATE_REPORT, "FINISHED") - chs.updateStat('lastTook', int(time.time()) - chs.getStat('startedAt')) + chs.updateStat( + "lastTook", int(time.time()) - chs.getStat("startedAt") + ) except Exception as e: - logger.warning("Callhome cycle status update failed: '%r'", e) + logger.warning( + "Callhome cycle status update failed: '%r'", e + ) self.callhome.lastMetricsGather = now self.callhome.requestMetricsGather = False self.gatherProtocol = None @@ -88,15 +88,15 @@ def run(self): class GatherMetricsProtocol(ProcessProtocol): - def __init__(self): self.data = None self.failed = False self.output = [] self.error = [] - chPath = zenPath('Products', 'ZenCallHome', 'callhome.py') - reactor.spawnProcess(self, 'python', args=['python', chPath, '-M'], - env=os.environ) + chPath = zenPath("Products", "ZenCallHome", "callhome.py") + reactor.spawnProcess( + self, "python", args=["python", chPath, "-M"], env=os.environ + ) def outReceived(self, data): self.output.append(data) @@ -105,11 +105,17 @@ def errReceived(self, data): self.error.append(data) def processEnded(self, reason): - out = ''.join(self.output) - err = ''.join(self.error) + out = "".join(self.output) + err = "".join(self.error) if reason.value.exitCode != 0: self.failed = True - logger.warning(('Callhome metrics gathering failed: ' + - 'stdout: %s, stderr: %s'), out, err) + logger.warning( + ( + "Callhome metrics gathering failed: " + + "stdout: %s, stderr: %s" + ), + out, + err, + ) else: self.data = out diff --git a/Products/ZenCallHome/transport/methods/browserjs.py b/Products/ZenCallHome/transport/methods/browserjs.py index c7f5fedba8..ebfda11334 100644 --- a/Products/ZenCallHome/transport/methods/browserjs.py +++ b/Products/ZenCallHome/transport/methods/browserjs.py @@ -7,7 +7,6 @@ # ############################################################################## - import base64 import json import logging @@ -23,52 +22,60 @@ from Products.ZenCallHome.transport import CallHome -JS_CALLHOME_URL = 'https://callhome.zenoss.com/callhome/v2/js' +JS_CALLHOME_URL = 
"https://callhome.zenoss.com/callhome/v2/js" MAX_GET_SIZE = 768 -logger = logging.getLogger('zen.callhome') +logger = logging.getLogger("zen.callhome") def split_to_range(strToSplit, maxSize): - return ([strToSplit[i:i+maxSize] - for i in range(0, len(strToSplit), maxSize)]) + return [ + strToSplit[i : i + maxSize] for i in range(0, len(strToSplit), maxSize) + ] def encode_for_js(toEnc): base64ToEnc = base64.urlsafe_b64encode(toEnc) - randToken = (''.join(random.choice(string.ascii_letters + string.digits) - for x in range(8))) + randToken = "".join( + random.choice(string.ascii_letters + string.digits) for x in range(8) + ) encPackets = split_to_range(base64ToEnc, MAX_GET_SIZE) - encPackets = [json.dumps({ - 'idx': x, - 'tot': len(encPackets), - 'rnd': randToken, - 'dat': encPackets[x]}) for x in range(len(encPackets))] + encPackets = [ + json.dumps( + { + "idx": x, + "tot": len(encPackets), + "rnd": randToken, + "dat": encPackets[x], + } + ) + for x in range(len(encPackets)) + ] return [base64.urlsafe_b64encode(zlib.compress(x)) for x in encPackets] +@interface.implementer(IHeadExtraManager) class ScriptTag(viewlet.ViewletBase): """ JS script tag injector for browser-based checkins """ - interface.implements(IHeadExtraManager) def render(self): dmd = self.context.dmd # if not logged in, inject nothing if not dmd.ZenUsers.getUserSettings(): - return '' + return "" callhome = CallHome(dmd) # if we've checked in or attempted to check in recently, inject nothing - if not callhome.attempt('browserjs'): - return '' + if not callhome.attempt("browserjs"): + return "" - payload = callhome.get_payload(method='browserjs') + payload = callhome.get_payload(method="browserjs") if not payload: - logger.warning('Error getting or encrypting payload for browserjs') - return '' + logger.warning("Error getting or encrypting payload for browserjs") + return "" # Output the checkin data to a js snippet, wait a few seconds in the # browser, and inject script tags to the checkin url to the body tag. 
@@ -89,8 +96,10 @@ def render(self): }; var task = new Ext.util.DelayedTask(Zenoss.Callhome_next); task.delay(5000); - """ % (json.dumps(encode_for_js(payload)), - JS_CALLHOME_URL) + """ % ( + json.dumps(encode_for_js(payload)), + JS_CALLHOME_URL, + ) class CallhomeRouter(DirectRouter): @@ -98,4 +107,4 @@ def checkin(self, returnPayload): # record successful check in callhome = CallHome(self.context.dmd) callhome.save_return_payload(returnPayload) - return '' + return "" diff --git a/Products/ZenCallHome/transport/methods/directpost.py b/Products/ZenCallHome/transport/methods/directpost.py index 2c8a7317db..5e3cf73ff8 100644 --- a/Products/ZenCallHome/transport/methods/directpost.py +++ b/Products/ZenCallHome/transport/methods/directpost.py @@ -7,34 +7,34 @@ # ############################################################################## - import base64 import logging -from urllib import urlencode import urllib2 -from Products.ZenCallHome.transport import CallHome +from urllib import urlencode + from Products.ZenCallHome.CallHomeStatus import CallHomeStatus +from Products.ZenCallHome.transport import CallHome -POST_CHECKIN_URL = 'https://callhome.zenoss.com/callhome/v2/post' +POST_CHECKIN_URL = "https://callhome.zenoss.com/callhome/v2/post" _URL_TIMEOUT = 5 -logger = logging.getLogger('zen.callhome') +logger = logging.getLogger("zen.callhome") def direct_post(dmd): callhome = CallHome(dmd) chs = CallHomeStatus() - if not callhome.attempt('directpost'): + if not callhome.attempt("directpost"): return payload = callhome.get_payload() if not payload: - logger.warning('Error getting or encrypting payload for direct-post') + logger.warning("Error getting or encrypting payload for direct-post") return payload = base64.urlsafe_b64encode(payload) - params = urlencode({'enc': payload}) + params = urlencode({"enc": payload}) chs.stage(chs.REQUEST_CALLHOME) try: @@ -42,9 +42,7 @@ def direct_post(dmd): returnPayload = httpreq.read() except Exception as e: chs.stage(chs.REQUEST_CALLHOME, "FAILED", str(e)) - logger.warning('Error retrieving data from callhome server %s', e) + logger.warning("Error retrieving data from callhome server %s", e) else: chs.stage(chs.REQUEST_CALLHOME, "FINISHED") callhome.save_return_payload(returnPayload) - - return diff --git a/Products/ZenCallHome/transport/methods/versioncheck.py b/Products/ZenCallHome/transport/methods/versioncheck.py index 3813d9552c..a6683a27a7 100644 --- a/Products/ZenCallHome/transport/methods/versioncheck.py +++ b/Products/ZenCallHome/transport/methods/versioncheck.py @@ -7,30 +7,31 @@ # ############################################################################## - import json import logging import time -from urllib import urlencode + import urllib2 +from urllib import urlencode + from Products.ZenUtils.Version import Version -VERSION_CHECK_URL = 'https://callhome.zenoss.com/callhome/v2/versioncheck' +VERSION_CHECK_URL = "https://callhome.zenoss.com/callhome/v2/versioncheck" _URL_TIMEOUT = 5 -logger = logging.getLogger('zen.callhome') +logger = logging.getLogger("zen.callhome") def version_check(dmd): - params = urlencode({'product': dmd.getProductName()}) + params = urlencode({"product": dmd.getProductName()}) try: httpreq = urllib2.urlopen(VERSION_CHECK_URL, params, _URL_TIMEOUT) returnPayload = json.loads(httpreq.read()) except Exception as e: - logger.warning('Error retrieving version from callhome server: %s', e) + logger.warning("Error retrieving version from callhome server: %s", e) else: - available = Version.parse('Zenoss ' + 
returnPayload['latest']) + available = Version.parse("Zenoss " + returnPayload["latest"]) version = available.short() - dmd.lastVersionCheck = long(time.time()) - if getattr(dmd, 'availableVersion', '') != version: + dmd.lastVersionCheck = int(time.time()) + if getattr(dmd, "availableVersion", "") != version: dmd.availableVersion = version diff --git a/Products/ZenCollector/CollectorCmdBase.py b/Products/ZenCollector/CollectorCmdBase.py deleted file mode 100644 index 2382e43eb5..0000000000 --- a/Products/ZenCollector/CollectorCmdBase.py +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2012, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -import sys - -import zope.component - -from Products.ZenUtils.CmdBase import CmdBase - -from .daemon import CollectorDaemon -from .interfaces import IWorkerExecutor, IWorkerTaskFactory -from .tasks import SimpleTaskSplitter - - -class CollectorCmdBase(CmdBase): - def __init__( - self, - iCollectorWorkerClass, - iCollectorPreferencesClass, - noopts=0, - args=None, - ): - super(CollectorCmdBase, self).__init__(noopts, args) - self.workerClass = iCollectorWorkerClass - self.prefsClass = iCollectorPreferencesClass - - def run(self): - if "--worker" in sys.argv: - executor = zope.component.getUtility(IWorkerExecutor) - executor.setWorkerClass(self.workerClass) - executor.run() - else: - myPreferences = self.prefsClass() - myTaskFactory = zope.component.getUtility(IWorkerTaskFactory) - myTaskFactory.setWorkerClass(self.workerClass) - myTaskSplitter = SimpleTaskSplitter(myTaskFactory) - daemon = CollectorDaemon(myPreferences, myTaskSplitter) - myTaskFactory.postInitialization() - self.log = daemon.log - daemon.run() diff --git a/Products/ZenCollector/DeviceConfigCache.py b/Products/ZenCollector/DeviceConfigCache.py deleted file mode 100644 index 558e6411e4..0000000000 --- a/Products/ZenCollector/DeviceConfigCache.py +++ /dev/null @@ -1,50 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2011, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. 
-# -############################################################################## - -import os - -from Products.ZenUtils.FileCache import FileCache - - -class DeviceConfigCache(object): - def __init__(self, basepath): - self.basepath = basepath - - def _getFileCache(self, monitor): - return FileCache(os.path.join(self.basepath, monitor)) - - def cacheConfigProxies(self, prefs, configs): - for cfg in configs: - self.updateConfigProxy(prefs, cfg) - - def updateConfigProxy(self, prefs, config): - cache = self._getFileCache(prefs.options.monitor) - key = config.configId - cache[key] = config - - def deleteConfigProxy(self, prefs, deviceid): - cache = self._getFileCache(prefs.options.monitor) - key = deviceid - try: - del cache[key] - except KeyError: - pass - - def getConfigProxies(self, prefs, cfgids): - cache = self._getFileCache(prefs.options.monitor) - if cfgids: - ret = [] - for cfgid in cfgids: - if cfgid in cache: - config = cache[cfgid] - if config: - ret.append(config) - return ret - else: - return filter(None, cache.values()) diff --git a/Products/ZenCollector/__init__.py b/Products/ZenCollector/__init__.py index 6f5ce1f780..d5c7c44b79 100644 --- a/Products/ZenCollector/__init__.py +++ b/Products/ZenCollector/__init__.py @@ -70,40 +70,3 @@ daemon = CollectorDaemon(myPreferences, myTaskSplitter) daemon.run() """ - -import zope.component -import zope.interface - -from .config import ConfigurationLoaderTask, ConfigurationProxy -from .interfaces import IFrameworkFactory -from .scheduler import Scheduler - - -@zope.interface.implementer(IFrameworkFactory) -class CoreCollectorFrameworkFactory(object): - def __init__(self): - self._configProxy = ConfigurationProxy() - self._scheduler = None - self._configurationLoader = ConfigurationLoaderTask - - def getConfigurationProxy(self): - return self._configProxy - - def getScheduler(self): - if self._scheduler is None: - self._scheduler = Scheduler() - return self._scheduler - - def getConfigurationLoaderTask(self): - return self._configurationLoader - - def getFrameworkBuildOptions(self): - return None - - -# Install the core collector framework factory as a Zope utility so it is -# available to all, and replaceable if necessary. -__factory__ = CoreCollectorFrameworkFactory() -zope.component.provideUtility(__factory__, IFrameworkFactory) -zope.component.provideUtility(__factory__, IFrameworkFactory, "core") -zope.component.provideUtility(__factory__, IFrameworkFactory, "nosip") diff --git a/Products/ZenCollector/collector.zcml b/Products/ZenCollector/collector.zcml new file mode 100644 index 0000000000..41f958fa4b --- /dev/null +++ b/Products/ZenCollector/collector.zcml @@ -0,0 +1,20 @@ + + + + + + + + diff --git a/Products/ZenCollector/config.py b/Products/ZenCollector/config.py deleted file mode 100644 index dc20210777..0000000000 --- a/Products/ZenCollector/config.py +++ /dev/null @@ -1,378 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2009, 2010, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -""" -The config module provides the implementation of the IConfigurationProxy -interface used within Zenoss Core. This implementation provides basic -configuration retrieval services directly from a remote ZenHub service. 
-""" - -import logging -import time - -import zope.component -import zope.interface - -from cryptography.fernet import Fernet -from metrology import Metrology -from twisted.internet import defer -from twisted.python.failure import Failure - -from Products.ZenHub.PBDaemon import HubDown -from Products.ZenUtils.observable import ObservableMixin - -from .interfaces import ( - ICollector, - ICollectorPreferences, - IFrameworkFactory, - IConfigurationProxy, - IScheduledTask, - IDataService, - IEventService, -) -from .tasks import TaskStates - -log = logging.getLogger("zen.collector.config") - - -@zope.interface.implementer(IConfigurationProxy) -class ConfigurationProxy(object): - """ - This implementation of IConfigurationProxy provides basic configuration - retrieval from the remote ZenHub instance using the remote configuration - service proxy as specified by the collector's configuration. - """ - - _cipher_suite = None - - def getPropertyItems(self, prefs): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - self._collector = zope.component.queryUtility(ICollector) - serviceProxy = self._collector.getRemoteConfigServiceProxy() - - # Load any configuration properties for this daemon - log.debug("Fetching daemon configuration properties") - d = serviceProxy.callRemote("getConfigProperties") - d.addCallback(lambda result: dict(result)) - return d - - def getThresholdClasses(self, prefs): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - self._collector = zope.component.queryUtility(ICollector) - serviceProxy = self._collector.getRemoteConfigServiceProxy() - - log.debug("Fetching threshold classes") - d = serviceProxy.callRemote("getThresholdClasses") - return d - - def getThresholds(self, prefs): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - self._collector = zope.component.queryUtility(ICollector) - serviceProxy = self._collector.getRemoteConfigServiceProxy() - - log.debug("Fetching collector thresholds") - d = serviceProxy.callRemote("getCollectorThresholds") - return d - - def getConfigProxies(self, prefs, ids=[]): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - self._collector = zope.component.queryUtility(ICollector) - serviceProxy = self._collector.getRemoteConfigServiceProxy() - - log.debug("Fetching configurations") - # get options from prefs.options and send to remote - d = serviceProxy.callRemote( - "getDeviceConfigs", ids, options=prefs.options.__dict__ - ) - return d - - def deleteConfigProxy(self, prefs, id): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - # not implemented in the basic ConfigurationProxy - return defer.succeed(None) - - def updateConfigProxy(self, prefs, config): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - # not implemented in the basic ConfigurationProxy - return defer.succeed(None) - - def getConfigNames(self, result, prefs): - if not ICollectorPreferences.providedBy(prefs): - raise TypeError("config must provide ICollectorPreferences") - - self._collector = zope.component.queryUtility(ICollector) - serviceProxy = self._collector.getRemoteConfigServiceProxy() - - log.debug("Fetching device names") - d = serviceProxy.callRemote( - "getDeviceNames", 
options=prefs.options.__dict__ - ) - - def printNames(names): - log.debug( - "workerid %s Fetched Names %s %s", - prefs.options.workerid, - len(names), - names, - ) - return names - - d.addCallback(printNames) - return d - - @defer.inlineCallbacks - def _get_cipher_suite(self): - """ - Fetch the encryption key for this collector from zenhub. - """ - if self._cipher_suite is None: - self._collector = zope.component.queryUtility(ICollector) - proxy = self._collector.getRemoteConfigServiceProxy() - try: - key = yield proxy.callRemote("getEncryptionKey") - self._cipher_suite = Fernet(key) - except Exception as e: - log.warn("Remote exception: %s", e) - self._cipher_suite = None - defer.returnValue(self._cipher_suite) - - @defer.inlineCallbacks - def encrypt(self, data): - """ - Encrypt data using a key from zenhub. - """ - cipher_suite = yield self._get_cipher_suite() - encrypted_data = None - if cipher_suite: - try: - encrypted_data = yield cipher_suite.encrypt(data) - except Exception as e: - log.warn("Exception encrypting data %s", e) - defer.returnValue(encrypted_data) - - @defer.inlineCallbacks - def decrypt(self, data): - """ - Decrypt data using a key from zenhub. - """ - cipher_suite = yield self._get_cipher_suite() - decrypted_data = None - if cipher_suite: - try: - decrypted_data = yield cipher_suite.decrypt(data) - except Exception as e: - log.warn("Exception decrypting data %s", e) - defer.returnValue(decrypted_data) - - -@zope.interface.implementer(IScheduledTask) -class ConfigurationLoaderTask(ObservableMixin): - """ - A task that periodically retrieves collector configuration via the - IConfigurationProxy service. - """ - - STATE_CONNECTING = "CONNECTING" - STATE_FETCH_MISC_CONFIG = "FETCHING_MISC_CONFIG" - STATE_FETCH_DEVICE_CONFIG = "FETCHING_DEVICE_CONFIG" - STATE_PROCESS_DEVICE_CONFIG = "PROCESSING_DEVICE_CONFIG" - - _frameworkFactoryName = "core" - - def __init__( - self, - name, - configId=None, - scheduleIntervalSeconds=None, - taskConfig=None, - ): - super(ConfigurationLoaderTask, self).__init__() - self._fetchConfigTimer = Metrology.timer("collectordaemon.configs") - - # Needed for interface - self.name = name - self.configId = configId if configId else name - self.state = TaskStates.STATE_IDLE - - self._dataService = zope.component.queryUtility(IDataService) - self._eventService = zope.component.queryUtility(IEventService) - - if taskConfig is None: - raise TypeError("taskConfig cannot be None") - self._prefs = taskConfig - self.interval = self._prefs.configCycleInterval * 60 - self.options = self._prefs.options - - self._daemon = zope.component.getUtility(ICollector) - self._daemon.heartbeatTimeout = self.options.heartbeatTimeout - log.debug( - "Heartbeat timeout set to %ds", self._daemon.heartbeatTimeout - ) - - frameworkFactory = zope.component.queryUtility( - IFrameworkFactory, self._frameworkFactoryName - ) - self._configProxy = frameworkFactory.getConfigurationProxy() - - self.devices = [] - self.startDelay = 0 - - def doTask(self): - """ - Contact zenhub and gather configuration data. - - @return: A task to gather configs - @rtype: Twisted deferred object - """ - log.debug("%s gathering configuration", self.name) - self.startTime = time.time() - - # Were we given a command-line option to collect a single device? 
- if self.options.device: - self.devices = [self.options.device] - - d = self._baseConfigs() - self._deviceConfigs(d, self.devices) - d.addCallback(self._notifyConfigLoaded) - d.addErrback(self._handleError) - return d - - def _baseConfigs(self): - """ - Load the configuration that doesn't depend on loading devices. - """ - d = self._fetchPropertyItems() - d.addCallback(self._processPropertyItems) - d.addCallback(self._fetchThresholdClasses) - d.addCallback(self._processThresholdClasses) - d.addCallback(self._fetchThresholds) - d.addCallback(self._processThresholds) - return d - - def _deviceConfigs(self, d, devices): - """ - Load the device configuration - """ - d.addCallback(self._fetchConfig, devices) - d.addCallback(self._processConfig) - - def _notifyConfigLoaded(self, result): - # This method is prematuraly called in enterprise bc - # _splitConfiguration calls defer.succeed after creating - # a new task for incremental loading - self._daemon.runPostConfigTasks() - return defer.succeed("Configuration loaded") - - def _handleError(self, result): - if isinstance(result, Failure): - log.error( - "Task %s configure failed: %s", - self.name, - result.getErrorMessage(), - ) - - # stop if a single device was requested and nothing found - if self.options.device or not self.options.cycle: - self._daemon.stop() - - ex = result.value - if isinstance(ex, HubDown): - result = str(ex) - # Allow the loader to be reaped and re-added - self.state = TaskStates.STATE_COMPLETED - return result - - def _fetchPropertyItems(self, previous_cb_result=None): - return defer.maybeDeferred( - self._configProxy.getPropertyItems, self._prefs - ) - - def _fetchThresholdClasses(self, previous_cb_result): - return defer.maybeDeferred( - self._configProxy.getThresholdClasses, self._prefs - ) - - def _fetchThresholds(self, previous_cb_result): - return defer.maybeDeferred( - self._configProxy.getThresholds, self._prefs - ) - - def _fetchConfig(self, result, devices): - self.state = self.STATE_FETCH_DEVICE_CONFIG - start = time.time() - - def recordTime(result): - # get in milliseconds - duration = int((time.time() - start) * 1000) - self._fetchConfigTimer.update(duration) - return result - - d = defer.maybeDeferred( - self._configProxy.getConfigProxies, self._prefs, devices - ) - d.addCallback(recordTime) - return d - - def _processPropertyItems(self, propertyItems): - log.debug("Processing received property items") - self.state = self.STATE_FETCH_MISC_CONFIG - if propertyItems: - self._daemon._setCollectorPreferences(propertyItems) - - def _processThresholdClasses(self, thresholdClasses): - log.debug("Processing received threshold classes") - if thresholdClasses: - self._daemon._loadThresholdClasses(thresholdClasses) - - def _processThresholds(self, thresholds): - log.debug("Processing received thresholds") - if thresholds: - self._daemon._configureThresholds(thresholds) - - @defer.inlineCallbacks - def _processConfig(self, configs, purgeOmitted=True): - log.debug("Processing %s received device configs", len(configs)) - if self.options.device: - configs = [ - cfg - for cfg in configs - if self.options.device in (cfg.id, cfg.configId) - ] - if not configs: - log.error( - "Configuration for %s unavailable -- " - "is that the correct name?", - self.options.device, - ) - - if not configs: - # No devices (eg new install), -d name doesn't exist or - # device explicitly ignored by zenhub service. 
- if not self.options.cycle: - self._daemon.stop() - defer.returnValue(["No device configuration to load"]) - - self.state = self.STATE_PROCESS_DEVICE_CONFIG - yield self._daemon._updateDeviceConfigs(configs, purgeOmitted) - defer.returnValue(configs) - - def cleanup(self): - pass # Required by interface diff --git a/Products/ZenCollector/config/__init__.py b/Products/ZenCollector/config/__init__.py new file mode 100644 index 0000000000..5f629923b3 --- /dev/null +++ b/Products/ZenCollector/config/__init__.py @@ -0,0 +1,22 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from .proxy import ConfigurationProxy +from .task import ( + ConfigurationLoaderTask, + ManyDeviceConfigLoader, + SingleDeviceConfigLoader, +) + +__all__ = ( + "ConfigurationLoaderTask", + "ConfigurationProxy", + "ManyDeviceConfigLoader", + "SingleDeviceConfigLoader", +) diff --git a/Products/ZenCollector/config/proxy.py b/Products/ZenCollector/config/proxy.py new file mode 100644 index 0000000000..1e049c4365 --- /dev/null +++ b/Products/ZenCollector/config/proxy.py @@ -0,0 +1,142 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2009, 2010, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +""" +The config module provides the implementation of the IConfigurationProxy +interface used within Zenoss Core. This implementation provides basic +configuration retrieval services directly from a remote ZenHub service. +""" + +import logging + +from cryptography.fernet import Fernet +from twisted.internet import defer +from zope.component import queryUtility +from zope.interface import implementer + +from ..interfaces import ICollector, IConfigurationProxy + +log = logging.getLogger("zen.collector.configurationproxy") + + +@implementer(IConfigurationProxy) +class ConfigurationProxy(object): + """ + This implementation of IConfigurationProxy provides basic configuration + retrieval from the remote ZenHub instance using the remote configuration + service proxy as specified by the collector's configuration. 
+ """ + + _cipher_suite = None + + def __init__(self, prefs): + super(ConfigurationProxy, self).__init__() + self._prefs = prefs + self._collector = queryUtility(ICollector) + + @defer.inlineCallbacks + def getPropertyItems(self): + ref = yield self._collector.getRemoteConfigServiceProxy() + result = yield ref.callRemote("getConfigProperties") + log.info("fetched daemon configuration properties") + props = dict(result) + defer.returnValue(props) + + @defer.inlineCallbacks + def getThresholdClasses(self): + ref = yield self._collector.getRemoteConfigServiceProxy() + classes = yield ref.callRemote("getThresholdClasses") + log.info("fetched threshold classes") + defer.returnValue(classes) + + @defer.inlineCallbacks + def getThresholds(self): + ref = yield self._collector.getRemoteConfigServiceProxy() + try: + thresholds = yield ref.callRemote("getCollectorThresholds") + log.info("fetched collector thresholds") + defer.returnValue(thresholds) + except Exception: + log.exception("getThresholds failed") + + @defer.inlineCallbacks + def getConfigProxies(self, token, deviceIds): + ref = yield self._collector.getRemoteConfigCacheProxy() + + log.debug("fetching configurations") + # get options from prefs.options and send to remote + proxies = yield ref.callRemote( + "getDeviceConfigs", + self._prefs.configurationService, + token, + deviceIds, + options=self._prefs.options.__dict__, + ) + defer.returnValue(proxies) + + @defer.inlineCallbacks + def getConfigNames(self): + ref = yield self._collector.getRemoteConfigCacheProxy() + + # log.info("fetching device names") + names = yield ref.callRemote( + "getDeviceNames", + self._prefs.configurationService, + options=self._prefs.options.__dict__, + ) + log.info( + "workerid %s fetched names %s %s", + self._prefs.options.workerid, + len(names), + names, + ) + defer.returnValue(names) + + @defer.inlineCallbacks + def _get_cipher_suite(self): + """ + Fetch the encryption key for this collector from zenhub. + """ + if self._cipher_suite is None: + ref = yield self._collector.getRemoteConfigServiceProxy() + try: + key = yield ref.callRemote("getEncryptionKey") + self._cipher_suite = Fernet(key) + except Exception as e: + log.warn("remote exception: %s", e) + self._cipher_suite = None + defer.returnValue(self._cipher_suite) + + @defer.inlineCallbacks + def encrypt(self, data): + """ + Encrypt data using a key from zenhub. + """ + cipher_suite = yield self._get_cipher_suite() + encrypted_data = None + if cipher_suite: + try: + encrypted_data = yield cipher_suite.encrypt(data) + except Exception as e: + log.warn("exception encrypting data %s", e) + defer.returnValue(encrypted_data) + + @defer.inlineCallbacks + def decrypt(self, data): + """ + Decrypt data using a key from zenhub. + """ + cipher_suite = yield self._get_cipher_suite() + decrypted_data = None + if cipher_suite: + try: + decrypted_data = yield cipher_suite.decrypt(data) + except Exception as e: + log.warn("exception decrypting data %s", e) + defer.returnValue(decrypted_data) diff --git a/Products/ZenCollector/config/task.py b/Products/ZenCollector/config/task.py new file mode 100644 index 0000000000..dfe179b067 --- /dev/null +++ b/Products/ZenCollector/config/task.py @@ -0,0 +1,141 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2009, 2010, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +import logging +import time + +from twisted.internet import defer + +log = logging.getLogger("zen.collector.config") + + +class ConfigurationLoaderTask(object): + """ + Periodically retrieves collector configuration via the + IConfigurationProxy service. + """ + + # Deprecated attribute kept because zenvsphere uses it for reasons + # that are no longer relevant. + STATE_FETCH_DEVICE_CONFIG = "n/a" + + def __init__(self, collector, proxy): + self._collector = collector + self._proxy = proxy + + @defer.inlineCallbacks + def __call__(self): + try: + properties = yield self._proxy.getPropertyItems() + self._processPropertyItems(properties) + + thresholdClasses = yield self._proxy.getThresholdClasses() + self._processThresholdClasses(thresholdClasses) + + thresholds = yield self._proxy.getThresholds() + self._processThresholds(thresholds) + + yield self._collector.runPostConfigTasks() + except Exception: + log.exception( + "failed to retrieve collector configuration " + "collection-daemon=%s", + self._collector.name, + ) + + def _processPropertyItems(self, propertyItems): + log.debug("processing received property items") + if propertyItems: + self._collector._setCollectorPreferences(propertyItems) + + def _processThresholdClasses(self, thresholdClasses): + log.debug("processing received threshold classes") + if thresholdClasses: + self._collector.loadThresholdClasses(thresholdClasses) + + def _processThresholds(self, thresholds): + log.debug("processing received thresholds") + if thresholds: + self._collector._configureThresholds(thresholds) + + +class SingleDeviceConfigLoader(object): + """Handles retrieving the config of a single device.""" + + def __init__(self, deviceid, collector, service, options, callback): + self._deviceId = deviceid + self._collector = collector + self._service = service + self._options = options + self._callback = callback + + @property + def deviceIds(self): + return [self._deviceId] + + @defer.inlineCallbacks + def __call__(self): + try: + ref = yield self._collector.getRemoteConfigCacheProxy() + + log.debug("fetching device config for %s", self._deviceId) + # get options from prefs.options and send to remote + config = yield ref.callRemote( + "getDeviceConfig", + self._service, + self._deviceId, + options=self._options.__dict__, + ) + yield self._callback(config) + except Exception: + log.exception("failed to retrieve device configs") + + +class ManyDeviceConfigLoader(object): + """Handles retrieving devices from the ConfigCache service.""" + + def __init__(self, proxy, callback): + self._proxy = proxy + self._callback = callback + self._deviceIds = set() + self._changes_since = 0 + + @property + def deviceIds(self): + return self._deviceIds + + @defer.inlineCallbacks + def __call__(self): + log.debug("fetching device configs") + try: + next_time = time.time() + config_data = yield self._proxy.getConfigProxies( + self._changes_since, self._deviceIds + ) + yield self._processConfigs(config_data) + self._changes_since = next_time + except Exception: + log.exception("failed to retrieve device configs") + + @defer.inlineCallbacks + def _processConfigs(self, config_data): + new = config_data.get("new", ()) + updated = config_data.get("updated", ()) + removed = config_data.get("removed", ()) + try: + try: + yield self._callback(new, updated, removed) + finally: + self._update_local_cache(new, updated, removed) + except Exception: + log.exception("failed to process device configs") + + 
def _update_local_cache(self, new, updated, removed): + self._deviceIds.difference_update(removed) + self._deviceIds.update(cfg.id for cfg in new) diff --git a/Products/ZenCollector/configcache/__init__.py b/Products/ZenCollector/configcache/__init__.py new file mode 100644 index 0000000000..bf7884f0d0 --- /dev/null +++ b/Products/ZenCollector/configcache/__init__.py @@ -0,0 +1,8 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## diff --git a/Products/ZenCollector/configcache/__main__.py b/Products/ZenCollector/configcache/__main__.py new file mode 100644 index 0000000000..03365d61b7 --- /dev/null +++ b/Products/ZenCollector/configcache/__main__.py @@ -0,0 +1,21 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +__all__ = ("main",) + + +def main(): + from .configcache import main + main() + + +if __name__ == "__main__": + main() diff --git a/Products/ZenCollector/configcache/api.py b/Products/ZenCollector/configcache/api.py new file mode 100644 index 0000000000..515c52c766 --- /dev/null +++ b/Products/ZenCollector/configcache/api.py @@ -0,0 +1,98 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function, absolute_import + +import logging +import time + +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from .cache import DeviceQuery +from .dispatcher import DeviceConfigTaskDispatcher, OidMapTaskDispatcher +from .handlers import ( + NewDeviceHandler, + DeviceUpdateHandler, + MissingConfigsHandler, +) +from .utils import DeviceProperties, getDeviceConfigServices, OidMapProperties + +log = logging.getLogger("zen.configcache") + + +class ConfigCache(object): + """ + Implements an API for manipulating the Configuration Cache to the rest + of the system. 
+ """ + + @classmethod + def new(cls): + client = getRedisClient(url=getRedisUrl()) + devicestore = createObject("deviceconfigcache-store", client) + configClasses = getDeviceConfigServices() + devicedispatcher = DeviceConfigTaskDispatcher(configClasses) + oidmapstore = createObject("oidmapcache-store", client) + oidmapdispatcher = OidMapTaskDispatcher() + return cls( + devicestore, devicedispatcher, oidmapstore, oidmapdispatcher + ) + + def __init__(self, devstore, devdispatch, oidstore, oiddispatch): + self.__new = NewDeviceHandler(log, devstore, devdispatch) + self.__update = DeviceUpdateHandler(log, devstore, devdispatch) + self.__missing = MissingConfigsHandler(log, devstore, devdispatch) + self.__stores = type( + "Store", + (object,), + { + "device": devstore, + "oidmap": oidstore, + } + )() + self.__oidmapdispatcher = oiddispatch + + def update_device(self, device): + """ + Expires or retires existing configs for the device and sends build + jobs to speculatively create new configurations for the device. + May also delete configurations if a job produces no config for + configuration that existed previously. + """ + monitor = device.getPerformanceServerName() + if monitor is None: + raise RuntimeError( + "Device '%s' is not a member of a collector" % (device.id,) + ) + props = DeviceProperties(device) + buildlimit = props.build_timeout + # Check for device class change + stored_uid = self.__stores.device.get_uid(device.id) + if device.getPrimaryPath() != stored_uid: + self.__new(device.id, monitor, buildlimit, False) + else: + # Note: the store's `search` method only returns keys for configs + # that exist. + keys_with_config = tuple( + self.__stores.device.search( + DeviceQuery(monitor=monitor, device=device.id) + ) + ) + minttl = props.minimum_ttl + self.__update(keys_with_config, minttl) + self.__missing(device.id, monitor, keys_with_config, buildlimit) + + def update_oidmaps(self): + """ + Expires the cached oidmap data. + """ + timeout = OidMapProperties().build_timeout + self.__oidmapdispatcher.dispatch(timeout, time.time()) diff --git a/Products/ZenCollector/configcache/app/__init__.py b/Products/ZenCollector/configcache/app/__init__.py new file mode 100644 index 0000000000..c972e8d2cb --- /dev/null +++ b/Products/ZenCollector/configcache/app/__init__.py @@ -0,0 +1,17 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .base import Application +from .init import initialize_environment +from .pid import pidfile + + +__all__ = ("Application", "initialize_environment", "pidfile") diff --git a/Products/ZenCollector/configcache/app/args.py b/Products/ZenCollector/configcache/app/args.py new file mode 100644 index 0000000000..84a4bb6a57 --- /dev/null +++ b/Products/ZenCollector/configcache/app/args.py @@ -0,0 +1,53 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+#
+##############################################################################
+
+import argparse as _argparse
+
+from Products.ZenUtils.terminal_size import (
+    get_terminal_size as _get_terminal_size,
+)
+
+
+def get_arg_parser(description, epilog=None):
+    parser = _argparse.ArgumentParser(
+        description=description,
+        epilog=epilog,
+        formatter_class=ZenHelpFormatter,
+    )
+    _fix_optional_args_title(parser)
+    return parser
+
+
+def get_subparser(subparsers, name, description=None, parent=None):
+    subparser = subparsers.add_parser(
+        name,
+        description=description + ".",
+        help=description,
+        parents=[parent] if parent else [],
+        formatter_class=ZenHelpFormatter,
+    )
+    _fix_optional_args_title(subparser, name.capitalize())
+    return subparser
+
+
+def _fix_optional_args_title(parser, title="General"):
+    for grp in parser._action_groups:
+        if grp.title == "optional arguments":
+            grp.title = "{} Options".format(title)
+
+
+class ZenHelpFormatter(_argparse.ArgumentDefaultsHelpFormatter):
+    """
+    Derive to set the COLUMNS environment variable when displaying help.
+    """
+
+    def __init__(self, *args, **kwargs):
+        size = _get_terminal_size()
+        kwargs["width"] = size.columns - 2
+        super(ZenHelpFormatter, self).__init__(*args, **kwargs)
diff --git a/Products/ZenCollector/configcache/app/base.py b/Products/ZenCollector/configcache/app/base.py
new file mode 100644
index 0000000000..507df9d048
--- /dev/null
+++ b/Products/ZenCollector/configcache/app/base.py
@@ -0,0 +1,139 @@
+##############################################################################
+#
+# Copyright (C) Zenoss, Inc. 2023, all rights reserved.
+#
+# This content is made available according to terms specified in
+# License.zenoss under the directory where your Zenoss product is installed.
+# +############################################################################## + +from __future__ import absolute_import + +import logging + +from signal import signal, SIGTERM, SIGHUP, SIGINT +from threading import Event + +import attr + +from MySQLdb import OperationalError + +from ..utils import MetricReporter + +from .config import add_config_arguments, getConfigFromArguments +from .init import initialize_environment +from .genconf import GenerateConfig +from .logger import add_logging_arguments, setup_logging, setup_debug_logging +from .pid import add_pidfile_arguments, pidfile +from .zodb import add_zodb_arguments, zodb + +_delay = 10 # seconds + + +class Application(object): + """Base class for applications.""" + + @classmethod + def from_args(cls, args): + config = getConfigFromArguments(args.parser, args) + return cls(config, args.task) + + def __init__(self, config, task): + # config data from config files and CLI args + self.config = config + self.task = task + + def run(self): + configs = getattr(self.task, "configs", ()) + overrides = getattr(self.task, "config_overrides", ()) + initialize_environment(configs=configs, overrides=overrides) + setup_logging(self.config) + setup_debug_logging(self.config) + with pidfile(self.config): + stop = Event() + set_shutdown_handler(lambda x, y: _handle_signal(stop, x, y)) + controller = _Controller(stop) + log = logging.getLogger( + "zen.{}".format(self.task.__module__.split(".", 2)[-1]) + ) + log.info("application has started") + try: + # Setup Metric Reporting + prefix = getattr(self.task, "metric_prefix", "") + metric_reporter = MetricReporter( + tags={"internal": True}, prefix=prefix + ) + + # Run the application loop + while not controller.shutdown: + try: + with zodb(self.config) as (db, session, dmd): + ctx = ApplicationContext( + controller, + db, + session, + dmd, + metric_reporter, + ) + self.task(self.config, ctx).run() + except OperationalError as oe: + log.warn("Lost database connection: %s", oe) + except Exception: + log.exception("unhandled error") + controller.wait(_delay) + except BaseException as e: + log.warn("shutting down due to %s", e) + controller.quit() + finally: + log.info("application is quitting") + + @staticmethod + def add_genconf_command(subparsers, parsers): + GenerateConfig.add_command(subparsers, parsers) + pass + + @staticmethod + def add_all_arguments(parser): + add_config_arguments(parser) + add_pidfile_arguments(parser) + add_logging_arguments(parser) + add_zodb_arguments(parser) + + add_config_arguments = staticmethod(add_config_arguments) + add_pidfile_arguments = staticmethod(add_pidfile_arguments) + add_logging_arguments = staticmethod(add_logging_arguments) + add_zodb_arguments = staticmethod(add_zodb_arguments) + + +@attr.s(frozen=True, slots=True) +class ApplicationContext(object): + controller = attr.ib() + db = attr.ib() + session = attr.ib() + dmd = attr.ib() + metric_reporter = attr.ib() + + +class _Controller(object): + def __init__(self, stop): + self.__stop = stop + + @property + def shutdown(self): + return self.__stop.is_set() + + def quit(self): + self.__stop.set() + + def wait(self, interval): + self.__stop.wait(interval) + + +def _handle_signal(stop, signum, frame): + stop.set() + + +def set_shutdown_handler(func): + signal(SIGTERM, func) + signal(SIGHUP, func) + signal(SIGINT, func) diff --git a/Products/ZenCollector/configcache/app/config.py b/Products/ZenCollector/configcache/app/config.py new file mode 100644 index 0000000000..b7cba77f85 --- /dev/null +++ 
b/Products/ZenCollector/configcache/app/config.py
@@ -0,0 +1,101 @@
+##############################################################################
+#
+# Copyright (C) Zenoss, Inc. 2023, all rights reserved.
+#
+# This content is made available according to terms specified in
+# License.zenoss under the directory where your Zenoss product is installed.
+#
+##############################################################################
+
+import os
+
+from Products.ZenUtils.config import Config, ConfigLoader
+from Products.ZenUtils.GlobalConfig import getGlobalConfiguration
+from Products.ZenUtils.Utils import zenPath
+
+
+def add_config_arguments(parser):
+    filename = "-".join(parser.prog.split(" ")[:-1]) + ".conf"
+    parser.add_argument(
+        "-C",
+        "--configfile",
+        default=os.path.join(zenPath("etc"), filename),
+        help="Pathname of the configuration file"
+    )
+
+
+def getConfigFromArguments(parser, args):
+    """
+    Return a dict containing the configuration.
+
+    @type args: argparse.Namespace
+    """
+    options = tuple(
+        (cfg_name, opt_name, xform, default)
+        for cfg_name, opt_name, xform, default in (
+            (
+                _long_name(act.option_strings),
+                act.dest,
+                act.type if act.type is not None else _identity,
+                act.default,
+            )
+            for act in parser._actions
+            if act.dest not in ("help", "version", "configfile")
+        )
+        if cfg_name is not None
+    )
+    dest_names = {
+        long_name: dest_name
+        for long_name, dest_name, _, _ in options
+    }
+    xforms = {
+        long_name: xform
+        for long_name, _, xform, _ in options
+    }
+    defaults = {
+        long_name: default
+        for long_name, _, _, default in options
+    }
+    config = defaults.copy()
+    config.update(
+        (key, xforms[key](value))
+        for key, value in getGlobalConfiguration().items()
+        if key in dest_names
+    )
+
+    configfile = getattr(args, "configfile", None)
+    if configfile:
+        app_config_loader = ConfigLoader(configfile, Config)
+        try:
+            config.update(
+                (key, xforms[key](value))
+                for key, value in app_config_loader().items()
+                if key in dest_names
+            )
+        except IOError as ex:
+            # Re-raise exception if the error is not "File not found"
+            if ex.errno != 2:
+                raise
+
+    # Apply command-line overrides. An override is a value from the
+    # command line that differs from the default. This does mean that
+    # explicitly specified default values on the CLI are ignored.
+    config.update(
+        (cname, override)
+        for cname, default, override in (
+            (cname, defaults[cname], getattr(args, oname, None))
+            for cname, oname in dest_names.items()
+        )
+        if override != default
+    )
+    return config
+
+
+def _long_name(names):
+    name = next((nm for nm in names if nm.startswith("--")), None)
+    if name:
+        return name[2:]
+
+
+def _identity(value):
+    return value
diff --git a/Products/ZenCollector/configcache/app/genconf.py b/Products/ZenCollector/configcache/app/genconf.py
new file mode 100644
index 0000000000..2f2b223f5d
--- /dev/null
+++ b/Products/ZenCollector/configcache/app/genconf.py
@@ -0,0 +1,60 @@
+from __future__ import print_function
+
+import textwrap
+
+from .args import get_subparser
+
+# List of options to not include when generating a config file.
+_ARGS_TO_IGNORE = ( + "", + "configfile", + "help", +) + + +class GenerateConfig(object): + + description = "Write an example config file to stdout" + + @staticmethod + def add_command(subparsers, parsers): + subp_genconf = get_subparser( + subparsers, "genconf", GenerateConfig.description + ) + subp_genconf.set_defaults( + factory=GenerateConfig, + parsers=parsers, + ) + + def __init__(self, args): + self._parsers = args.parsers + + def run(self): + actions = [] + for parser in self._parsers: + for action in parser._actions: + if action.dest in _ARGS_TO_IGNORE: + continue + if any(action.dest == act[1] for act in actions): + continue + actions.append( + ( + action.help, + action.dest.replace("_", "-"), + action.default, + ) + ) + print(_item_as_text(actions[0])) + for act in actions[1:]: + print() + print(_item_as_text(act)) + + +def _item_as_text(item): + return "{}\n#{} {}".format( + "\n".join( + "# {}".format(line) for line in textwrap.wrap(item[0], width=75) + ), + item[1], + item[2], + ) diff --git a/Products/ZenCollector/configcache/app/init.py b/Products/ZenCollector/configcache/app/init.py new file mode 100644 index 0000000000..263c6a7ad8 --- /dev/null +++ b/Products/ZenCollector/configcache/app/init.py @@ -0,0 +1,56 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import sys + +import six + +from zope.configuration import xmlconfig + + +def initialize_environment(configs=(), overrides=(), useZope=True): + if useZope: + _use_zope(configs=configs, overrides=overrides) + else: + _no_zope(configs=configs, overrides=overrides) + + +def _use_zope(configs, overrides): + from Zope2.App import zcml + from OFS.Application import import_products + from Products.ZenUtils.zenpackload import load_zenpacks + import Products.ZenWidgets + + import_products() + load_zenpacks() + zcml.load_site() + _load_overrides( + zcml._context, [("scriptmessaging.zcml", Products.ZenWidgets)] + ) + _load_configs(zcml._context, configs) + _load_overrides(zcml._context, overrides) + + +def _no_zope(configs, overrides): + ctx = xmlconfig._getContext() + _load_configs(ctx, configs) + _load_overrides(ctx, overrides) + + +def _load_configs(ctx, configs): + for filename, module in configs: + if isinstance(module, six.string_types): + module = sys.modules[module] + xmlconfig.file(filename, package=module, context=ctx) + + +def _load_overrides(ctx, overrides): + for filepath, module in overrides: + xmlconfig.includeOverrides(ctx, filepath, package=module) + ctx.execute_actions() diff --git a/Products/ZenCollector/configcache/app/logger.py b/Products/ZenCollector/configcache/app/logger.py new file mode 100644 index 0000000000..ef79e4b600 --- /dev/null +++ b/Products/ZenCollector/configcache/app/logger.py @@ -0,0 +1,189 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
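For reference, the stanza format that GenerateConfig emits via _item_as_text, shown with one of the logging options added later in this patch (expected output in comments):

    print(_item_as_text(
        ("Default logging severity level", "log-level", "info")
    ))
    # prints:
    # # Default logging severity level
    # #log-level info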
+# +############################################################################## + +from __future__ import absolute_import + +import argparse +import copy +import logging +import logging.config +import logging.handlers +import os +import signal + +from Products.ZenUtils.Utils import zenPath + +_default_config_template = { + "version": 1, + "disable_existing_loggers": True, + "filters": {}, + "formatters": { + "main": { + "format": ( + "%(asctime)s.%(msecs)03d %(levelname)s %(name)s: " + "%(message)s" + ), + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + }, + "handlers": { + "main": { + "formatter": "main", + "class": "cloghandler.ConcurrentRotatingFileHandler", + "filename": None, + "maxBytes": None, + "backupCount": None, + "mode": "a", + "filters": [], + } + }, + "loggers": { + "": {"level": logging.WARN}, + "zen": {"level": logging.NOTSET}, + }, + "root": { + "handlers": ["main"], + }, +} + + +def setup_logging(config): + """Create formatting for log entries and set default log level.""" + logconfig = copy.deepcopy(_default_config_template) + loglevel = config["log-level"] + logconfig["loggers"]["zen"]["level"] = loglevel + logconfig["handlers"]["main"]["filename"] = config["log-filename"] + logconfig["handlers"]["main"]["maxBytes"] = ( + config["log-max-file-size"] * 1024 + ) + logconfig["handlers"]["main"]["backupCount"] = config["log-max-file-count"] + logging.config.dictConfig(logconfig) + + +def setup_debug_logging(config): + # Allow the user to dynamically lower and raise the logging + # level without restarts. + try: + signal.signal( + signal.SIGUSR1, + lambda x, y: _debug_logging_switch(config["log-level"], x, y), + ) + except ValueError: + # If we get called multiple times, this will generate an exception: + # ValueError: signal only works in main thread + # Ignore it as we've already set up the signal handler. 
+ pass + + +def _debug_logging_switch(default_level, signum, frame): + zenlog = logging.getLogger("zen") + currentlevel = zenlog.getEffectiveLevel() + if currentlevel == logging.DEBUG: + if currentlevel == default_level: + return + zenlog.setLevel(default_level) + logging.getLogger().setLevel(logging.WARN) + zenlog.info( + "restored logging level back to %s (%d)", + logging.getLevelName(default_level) or "unknown", + default_level, + ) + else: + zenlog.setLevel(logging.NOTSET) + logging.getLogger().setLevel(logging.DEBUG) + zenlog.info( + "logging level set to %s (%d)", + logging.getLevelName(logging.DEBUG), + logging.DEBUG, + ) + + +def _level_as_int(v): + try: + return int(v) + except ValueError: + return logging.getLevelName(v.upper()) + + +def _add_log_suffix(v): + if not v.endswith(".log"): + if not os.path.basename(v): + raise ValueError("no filename for log file given") + return v + ".log" + return v + + +class LogLevel(argparse.Action): + """Define a 'logging level' action for argparse.""" + + def __init__( + self, + option_strings, + dest, + nargs=None, + const=None, + default="info", + type=None, + choices=None, + help="Default logging severity level", + **kwargs + ): + if nargs is not None: + raise ValueError("'nargs' not supported for LogLevel action") + if type is not None: + raise ValueError("'type' not supported for LogLevel action") + if const is not None: + raise ValueError("'const' not supported for LogLevel action") + choices = tuple( + value + for pair in sorted( + (level_id, level_name.lower()) + for level_id, level_name in logging._levelNames.items() + if isinstance(level_id, int) and level_id != 0 + ) + for value in pair + ) + super(LogLevel, self).__init__( + option_strings, + dest, + default=default, + type=_level_as_int, + choices=choices, + help=help, + **kwargs + ) + + def __call__(self, parser, namespace, values=None, option_string=None): + setattr(namespace, self.dest, values) + + +def add_logging_arguments(parser): + group = parser.add_argument_group("Logging Options") + group.add_argument("-v", "--log-level", action=LogLevel) + filename = "-".join(parser.prog.split(" ")[:-1]) + ".log" + dirname = zenPath("log") + group.add_argument( + "--log-filename", + default=os.path.join(dirname, filename), + type=_add_log_suffix, + help="Pathname of the log file. If a directory path is not " + "specified, the log file is save to {}".format(dirname), + ) + group.add_argument( + "--log-max-file-size", + default=10240, + type=int, + help="Maximum size of log file in KB before starting a new file", + ) + group.add_argument( + "--log-max-file-count", + default=3, + type=int, + help="Maximum number of archival log files to keep", + ) diff --git a/Products/ZenCollector/configcache/app/pid.py b/Products/ZenCollector/configcache/app/pid.py new file mode 100644 index 0000000000..02effb5723 --- /dev/null +++ b/Products/ZenCollector/configcache/app/pid.py @@ -0,0 +1,130 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
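The logging helpers are normally wired up by Application.run(), but a stand-alone sketch shows the flow; the prog name is hypothetical, and this assumes a Zenoss install that provides zenPath() and the cloghandler package:

    import argparse

    from Products.ZenCollector.configcache.app.config import getConfigFromArguments
    from Products.ZenCollector.configcache.app.logger import (
        add_logging_arguments,
        setup_debug_logging,
        setup_logging,
    )

    parser = argparse.ArgumentParser(prog="zenexample daemon")
    add_logging_arguments(parser)
    args = parser.parse_args(["--log-level", "debug"])
    config = getConfigFromArguments(parser, args)

    setup_logging(config)        # dictConfig: rotating file handler, "zen" logger
    setup_debug_logging(config)  # afterwards SIGUSR1 toggles DEBUG at runtime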
+# +############################################################################## + +from __future__ import absolute_import + +import atexit +import errno +import fcntl +import logging +import os + +from Products.ZenUtils.Utils import zenPath + +log = logging.getLogger("zen.pid") + + +def _add_pid_suffix(v): + if not v.endswith(".pid"): + if not os.path.basename(v): + raise ValueError("no filename for pid file given") + return v + ".pid" + return v + + +def add_pidfile_arguments(parser): + filename = "-".join(parser.prog.split(" ")[:-1]) + ".pid" + dirname = os.path.join(zenPath("var"), "run") + parser.add_argument( + "--pidfile", + default=os.path.join(dirname, filename), + type=_add_pid_suffix, + help="Pathname of the PID file. If a directory path is not " + "specified, the pidfile is save to {}".format(dirname), + ) + + +class pidfile(object): + """ + Write a file containing the current process's PID. + + The context manager yields the PID value to the caller. + """ + + def __init__(self, config): + pidfile = config["pidfile"] + filename = os.path.basename(pidfile) + dirname = os.path.dirname(pidfile) + if not os.path.isdir(dirname): + if not dirname: + dirname = os.path.join(zenPath("var"), "run") + else: + raise RuntimeError( + "not a directory direcory={}".format(dirname) + ) + self._dirname = dirname + self._filename = filename + self.pathname = os.path.join(self._dirname, self._filename) + + def __enter__(self): + self.create() + return self + + def __exit__(self, exc_type=None, exc_value=None, exc_tb=None): + self.close() + + def read(self): + with open(self.pathname, "r") as fp: + return _read_pidfile(fp) + + def create(self): + atexit.register(self.close) + self.pid = os.getpid() + self.fp = open(self.pathname, "a+") + try: + _flock(self.fp.fileno()) + except IOError as ex: + raise RuntimeError( + "pidfile already locked pidfile={} error={}".format( + self.pathname, ex + ) + ) + oldpid = _read_pidfile(self.fp) + if oldpid is not None and pid_exists(oldpid): + raise RuntimeError("PID is still running pid={}".format(oldpid)) + self.fp.seek(0) + self.fp.truncate() + self.fp.write("%d\n" % self.pid) + self.fp.flush() + self.fp.seek(0) + + def close(self): + if not self.fp: + return + try: + self.fp.close() + self.fp = None # so subsequent calls to `close` exit early + except IOError as ex: + if ex.errno != errno.EBADF: + raise + finally: + if os.path.isfile(self.pathname): + os.remove(self.pathname) + + +def pid_exists(pid): + try: + os.kill(pid, 0) + except OSError as ex: + if ex.errno == errno.ESRCH: + # This pid has no matching process + return False + return True + + +def _read_pidfile(fp): + fp.seek(0) + pid_str = fp.read(16).split("\n", 1)[0].strip() + if not pid_str: + return None + return int(pid_str) + + +def _flock(fileno): + fcntl.flock(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB) diff --git a/Products/ZenCollector/configcache/app/zodb.py b/Products/ZenCollector/configcache/app/zodb.py new file mode 100644 index 0000000000..8f90bbf4db --- /dev/null +++ b/Products/ZenCollector/configcache/app/zodb.py @@ -0,0 +1,235 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
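A minimal sketch of the pidfile context manager used on its own (the path is illustrative); Application.run() wraps its main loop the same way:

    from Products.ZenCollector.configcache.app.pid import pidfile

    config = {"pidfile": "/opt/zenoss/var/run/zenexample.pid"}  # assumed location
    with pidfile(config) as pf:
        # the file is exclusively flock'd and holds os.getpid(); a second
        # process entering this block raises RuntimeError while the lock is
        # held or while the recorded pid still belongs to a running process
        print("locked %s as pid %d" % (pf.pathname, pf.pid))
    # on exit the handle is closed and the pid file removed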
+# +############################################################################## + +import contextlib +import os + +import transaction +import ZODB.config + +from AccessControl.SecurityManagement import ( + newSecurityManager, + noSecurityManager, +) +from Products.CMFCore.utils import getToolByName +from ZPublisher.HTTPRequest import HTTPRequest +from ZPublisher.HTTPResponse import HTTPResponse +from ZPublisher.BaseRequest import RequestContainer + +from Products.ZenRelations.ZenPropertyManager import setDescriptors +from Products.ZenUtils.Utils import getObjByPath, zenPath + +_zodb_config_template = """\ +%import relstorage + + cache-size {zodb-cachesize} + + + cache-local-mb {zodb-max-cache-mb} + cache-local-object-max {zodb-cache-max-object-size} + keep-history false + + host {zodb-host} + port {zodb-port} + user {zodb-user} + passwd {zodb-password} + db {zodb-db} + + + +""" + +_default_config_file = os.path.join(zenPath("etc"), "zodb.conf") + + +class _ZODBConnectionDefaults: + host = "localhost" + user = "zenoss" + password = "zenoss" # noqa: S105 + db = "zodb" + port = 3306 + cachesize = 1000 + cache_max_object_size = 1048576 + commit_lock_timeout = 30 + max_cache_mb = 512 + + +def add_zodb_arguments(parser): + """Add ZODB CLI arguments to `parser`.""" + group = parser.add_argument_group("ZODB Options") + group.add_argument( + "--zodb-config-file", + default=_default_config_file, + help="ZODB connection config file" + ) + group.add_argument( + "--zodb-cachesize", + default=_ZODBConnectionDefaults.cachesize, + type=int, + help="Maximum number of objects kept in the cache", + ) + group.add_argument( + "--zodb-host", + default=_ZODBConnectionDefaults.host, + help="Hostname of the MySQL server for ZODB", + ) + group.add_argument( + "--zodb-port", + type=int, + default=_ZODBConnectionDefaults.port, + help="Port of the MySQL server for ZODB", + ) + group.add_argument( + "--zodb-user", + default=_ZODBConnectionDefaults.user, + help="User of the MySQL server for ZODB", + ) + group.add_argument( + "--zodb-password", + default=_ZODBConnectionDefaults.password, + help="Password of the MySQL server for ZODB", + ) + group.add_argument( + "--zodb-db", + default=_ZODBConnectionDefaults.db, + help="Name of database for MySQL object store", + ) + group.add_argument( + "--zodb-cache-max-object-size", + default=_ZODBConnectionDefaults.cache_max_object_size, + type=int, + help="Maximum size of an object stored in the cache (bytes)", + ) + group.add_argument( + "--zodb-commit-lock-timeout", + default=_ZODBConnectionDefaults.commit_lock_timeout, + type=float, + help=( + "Specify the number of seconds a database connection will " + "wait to acquire a database 'commit' lock before failing." + ), + ) + group.add_argument( + "--zodb-max-cache-mb", + default=_ZODBConnectionDefaults.max_cache_mb, + type=int, + help="Maximum size of the cache (megabytes)" + ) + + +@contextlib.contextmanager +def zodb(config): + """ + Context manager managing the connection to ZODB. + + @type config: dict + """ + with contextlib.closing(getDB(config)) as db: + with contextlib.closing(db.open()) as session: + try: + with dataroot(session) as dmd: + yield (db, session, dmd) + finally: + transaction.abort() + + +def getDB(config): + """ + Returns a connection to the ZODB database. + + If specified, the 'zodb-config-file' key in `config` should name a + file containing the ZODB connection configuration in the ZConfig format. 
+ + :param config: Contains configuration data for ZODB connection + :type config: dict + :rtype: :class:`ZODB.DB.DB` + """ + configfile = config.get("zodb-config-file") + if configfile and os.path.isfile(configfile): + url = "file://%s" % configfile + return ZODB.config.databaseFromURL(url) + zodb_config = _getConfigString(config) + return ZODB.config.databaseFromString(zodb_config) + + +@contextlib.contextmanager +def dataroot(session): + """ + Context manager returning the root Zenoss ZODB object from the session. + + The data root is commonly known as the "dmd" object. + + :param session: An active ZODB connection (session) object. + :type session: :class:`ZODB.Connection.Connection` + :rtype: :class:`Products.ZenModel.DataRoot.DataRoot` + """ + root = session.root() + application = _getContext(root["Application"]) + dataroot = getObjByPath(application, "/zport/dmd") + _ = _login(dataroot) + setDescriptors(dataroot) + try: + yield dataroot + finally: + noSecurityManager() + + +def _getConfigString(config): + """ + Returns a ZConfig string to connect to ZODB. + + :rtype: str + """ + return _zodb_config_template.format(**config) + + +def _getContext(app): + resp = HTTPResponse(stdout=None) + env = { + "SERVER_NAME": "localhost", + "SERVER_PORT": "8080", + "REQUEST_METHOD": "GET", + } + req = HTTPRequest(None, env, resp) + return app.__of__(RequestContainer(REQUEST=req)) + + +_default_user = "zenoss_system" + + +def _login(context, userid=_default_user): + """Authenticate user and configure credentials.""" + if userid is None: + userid = _default_user + + user = _getUser(context, userid) + newSecurityManager(None, user) + return user + + +def _getUser(context, userid): + root = context.getPhysicalRoot() + tool = getToolByName(root, "acl_users") + + user = tool.getUserById(userid) + if user is None: + # Try a different tool. + tool = getToolByName(root.zport, "acl_users") + user = tool.getUserById(userid) + + if user is None: + user = tool.getUserById(_default_user) + + if not hasattr(user, "aq_base"): + user = user.__of__(tool) + + return user + + +__all__ = ("zodb", "getDB", "dataroot") diff --git a/Products/ZenCollector/configcache/cache/__init__.py b/Products/ZenCollector/configcache/cache/__init__.py new file mode 100644 index 0000000000..8e79a1578a --- /dev/null +++ b/Products/ZenCollector/configcache/cache/__init__.py @@ -0,0 +1,26 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .model import ( + ConfigStatus, + DeviceKey, + DeviceQuery, + DeviceRecord, + OidMapRecord, +) + +__all__ = ( + "ConfigStatus", + "DeviceKey", + "DeviceQuery", + "DeviceRecord", + "OidMapRecord", +) diff --git a/Products/ZenCollector/configcache/cache/model.py b/Products/ZenCollector/configcache/cache/model.py new file mode 100644 index 0000000000..341f7bf3ec --- /dev/null +++ b/Products/ZenCollector/configcache/cache/model.py @@ -0,0 +1,247 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
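A sketch of using the zodb() context manager directly, outside Application.run(); it assumes a reachable RelStorage/MySQL ZODB and relies on the defaults registered by add_zodb_arguments:

    import argparse

    from Products.ZenCollector.configcache.app.config import getConfigFromArguments
    from Products.ZenCollector.configcache.app.zodb import add_zodb_arguments, zodb

    parser = argparse.ArgumentParser(prog="zenexample daemon")
    add_zodb_arguments(parser)
    config = getConfigFromArguments(parser, parser.parse_args([]))

    with zodb(config) as (db, session, dmd):
        # dmd is the authenticated /zport/dmd data root; the transaction is
        # aborted on exit, so the block is effectively read-only by default
        print("/".join(dmd.getPhysicalPath()))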
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections + +import attr + +from attr.validators import instance_of + +from Products.ZenCollector.services.config import DeviceProxy + +from .utils import parse_atoms, extract_atoms + + +__all__ = ( + "DeviceKey", + "DeviceQuery", + "DeviceRecord", + "ConfigStatus", + "OidMapRecord", + "KeyConverter", +) + + +@attr.s(frozen=True, slots=True) +class CacheKey(object): + """ + Stub key class for tables that have a fixed (no parameters) key. + """ + + +@attr.s(slots=True) +class OidMapRecord(object): + created = attr.ib(validator=instance_of(float)) + checksum = attr.ib(validator=instance_of(str)) + oidmap = attr.ib(validator=instance_of(dict)) + + @classmethod + def make(cls, created, checksum, oidmap): + return cls(created=created, checksum=checksum, oidmap=oidmap) + + +@attr.s(frozen=True, slots=True) +class DeviceQuery(object): + service = attr.ib(converter=str, validator=instance_of(str), default="*") + monitor = attr.ib(converter=str, validator=instance_of(str), default="*") + device = attr.ib(converter=str, validator=instance_of(str), default="*") + + +@attr.s(frozen=True, slots=True) +class DeviceKey(CacheKey): + service = attr.ib(converter=str, validator=instance_of(str)) + monitor = attr.ib(converter=str, validator=instance_of(str)) + device = attr.ib(converter=str, validator=instance_of(str)) + + +@attr.s(slots=True) +class DeviceRecord(object): + key = attr.ib( + validator=instance_of(DeviceKey), on_setattr=attr.setters.NO_OP + ) + uid = attr.ib(validator=instance_of(str), on_setattr=attr.setters.NO_OP) + updated = attr.ib(validator=instance_of(float)) + config = attr.ib(validator=instance_of(DeviceProxy)) + + @classmethod + def make(cls, svc, mon, dev, uid, updated, config): + return cls(DeviceKey(svc, mon, dev), uid, updated, config) + + @property + def service(self): + return self.key.service + + @property + def monitor(self): + return self.key.monitor + + @property + def device(self): + return self.key.device + + +@attr.s(slots=True) +class _Status(object): + """Base class for status classes.""" + + key = attr.ib(validator=instance_of(CacheKey)) + + +class _ConfigStatus(object): + """ + Namespace class for Current, Retired, Expired, Pending, and Building types. 
+ """ + + @attr.s(slots=True, frozen=True, repr_ns="ConfigStatus") + class Current(_Status): + """The configuration is current.""" + + updated = attr.ib(validator=instance_of(float)) + + @attr.s(slots=True, frozen=True, repr_ns="ConfigStatus") + class Retired(_Status): + """The cofiguration is retired, but not yet expired.""" + + retired = attr.ib(validator=instance_of(float)) + + @attr.s(slots=True, frozen=True, repr_ns="ConfigStatus") + class Expired(_Status): + """The configuration has expired.""" + + expired = attr.ib(validator=instance_of(float)) + + @attr.s(slots=True, frozen=True, repr_ns="ConfigStatus") + class Pending(_Status): + """The configuration is waiting for a rebuild.""" + + submitted = attr.ib(validator=instance_of(float)) + + @attr.s(slots=True, frozen=True, repr_ns="ConfigStatus") + class Building(_Status): + """The configuration is rebuilding.""" + + started = attr.ib(validator=instance_of(float)) + + def __contains__(self, value): + return isinstance( + value, + ( + _ConfigStatus.Current, + _ConfigStatus.Retired, + _ConfigStatus.Expired, + _ConfigStatus.Pending, + _ConfigStatus.Building, + ), + ) + + +ConfigStatus = _ConfigStatus() + + +class KeyConverter(object): + def __init__(self, template, keytype=CacheKey, querytype=None): + self.__template = template + self.__keytype = keytype + self.__querytype = querytype + self.__cache = _Cache() + self.__atoms = parse_atoms(template) + self.__atom_cnt = len(self.__atoms) + + def _atoms(self, raw): + extracted = extract_atoms(raw, ":", self.__atom_cnt) + return dict(zip(self.__atoms, extracted)) + + def to_raw(self, key): + """ + Return an encoded raw key using the content of `key`. + """ + if not isinstance(key, (self.__keytype, self.__querytype)): + return key + hkey = _hashkey(key) + value = self.__cache.get(hkey) + if value is None: + value = self.__template.format(**attr.asdict(key)) + self.__cache[hkey] = value + return value + + def from_raw(self, raw): + """ + Return an instance of keytype using the contents of `raw`. + """ + hkey = _hashkey(raw) + value = self.__cache.get(hkey) + if value is None: + value = self.__keytype(**self._atoms(raw)) + self.__cache[hkey] = value + return value + + def parse(self, raw): + """ + Return a dict containing the parsed components from the raw key. + """ + return self._atoms(raw) + + +class _HashedTuple(tuple): + __hashvalue = None + + def __hash__(self, hash=tuple.__hash__): + hv = self.__hashvalue + if hv is None: + self.__hashvalue = hv = hash(self) + return hv + + def __add__(self, other, add=tuple.__add__): + return _HashedTuple(add(self, other)) + + def __radd__(self, other, add=tuple.__add__): + return _HashedTuple(add(other, self)) + + def __getstate__(self): + return {} + + +_kwmark = (_HashedTuple,) + + +def _hashkey(*args, **kw): + if kw: + return _HashedTuple(args + sum(sorted(kw.items()), _kwmark)) + return _HashedTuple(args) + + +class _Cache(collections.MutableMapping): + def __init__(self, maxsize=128): + self.__data = collections.OrderedDict() + self.__maxsize = maxsize + + def __getitem__(self, key): + # Pop the item and re-add it so that the key moves to the end. 
+ value = self.__data.pop(key) + self.__data[key] = value + return value + + def __setitem__(self, key, value): + maxsize = self.__maxsize + size = len(self.__data) + if size >= maxsize: + nextkey = next(iter(self.__data)) + self.__data.pop(nextkey) + self.__data[key] = value + + def __delitem__(self, key): + del self.__data[key] + + def __len__(self): + return len(self.__data) + + def __iter__(self): + return iter(self.__data) diff --git a/Products/ZenCollector/configcache/cache/storage/__init__.py b/Products/ZenCollector/configcache/cache/storage/__init__.py new file mode 100644 index 0000000000..51d432e77b --- /dev/null +++ b/Products/ZenCollector/configcache/cache/storage/__init__.py @@ -0,0 +1,21 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .device import DeviceConfigStore, DeviceConfigStoreFactory +from .oidmap import OidMapStore, OidMapStoreFactory + + +__all__ = ( + "DeviceConfigStore", + "DeviceConfigStoreFactory", + "OidMapStore", + "OidMapStoreFactory", +) diff --git a/Products/ZenCollector/configcache/cache/storage/device.py b/Products/ZenCollector/configcache/cache/storage/device.py new file mode 100644 index 0000000000..e57e2330d5 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/storage/device.py @@ -0,0 +1,778 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +# Key structure +# ============= +# configcache:device:uid: +# configcache:device:config::: +# configcache:device:age:: [(, ), ...] +# configcache:device:retired:: [(, ), ...] +# configcache:device:expired:: [(, ), ...] +# configcache:device:pending:: [(, ), ...] +# configcache:device:building:: [(, ), ...] +# +# While "device" seems redundant, other values in this position could be +# "threshold" and "property". +# +# * uid - Maps a device to its object path in ZODB +# * config - Maps a key (::) to a configuration +# * age - Stores the timestamp for when the configuration was created +# +# The following keys store the state of a device's config. +# +# * retired - devices with a 'retired' config. +# +# The is a copy from the 'age' key. Since retirement is +# controlled by a z-property, storing the time when the config +# transitioned to 'retire' is not useful because the z-property +# can change dynamically. +# +# * expired - devices with an 'expired' config. +# +# The is the timestamp when the config was expired. +# +# * pending - devices with a 'pending' config +# +# The is the timestamp when the build_device_config job +# was submitted. +# +# * building - devices with a 'building' config +# +# The is the timestamp when the build_device_config job +# began execution. +# +# A device may exist in only one of 'retired', 'expired', 'pending', +# 'building', or none of them. +# +# names the configuration service class used to generate the +# configuration. +# names the monitor (collector) the device belongs to. 
+# is the ID of the device +# is the object path to the device in ZODB. +# + +from __future__ import absolute_import, print_function, division + +import inspect +import json +import logging +import re +import types +import zlib + +from functools import partial +from itertools import chain + +import attr +import six + +from attr.validators import instance_of +from twisted.spread.jelly import jelly, unjelly +from zope.component.factory import Factory + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from ..model import ( + DeviceKey, + DeviceQuery, + DeviceRecord, + ConfigStatus, + KeyConverter, +) +from ..table import String, SortedSet + +_app = "configcache" +log = logging.getLogger("zen.configcache.storage") + + +class DeviceConfigStoreFactory(Factory): + """ + IFactory implementation for ConfigStore objects. + """ + + def __init__(self): + super(DeviceConfigStoreFactory, self).__init__( + DeviceConfigStore, + "DeviceConfigStore", + "Device Configuration Cache Storage", + ) + + +_uid_template = "{app}:device:uid:{{device}}" +_config_template = "{app}:device:config:{{service}}:{{monitor}}:{{device}}" +_status_template = "{app}:device:{category}:{{service}}:{{monitor}}" + + +class DeviceConfigStore(object): + """ + A device config store. + """ + + @classmethod + def make(cls): + """Create and return a ConfigStore object.""" + client = getRedisClient(url=getRedisUrl()) + return cls(client) + + def __init__(self, client): + """Initialize a ConfigStore instance.""" + self.__client = client + self.__uids = _StringTable( + _uid_template.format(app=_app), keytype=_UIDKey + ) + self.__config = _StringTable(_config_template.format(app=_app)) + self.__age = _SortedSetTable( + _status_template.format(app=_app, category="age") + ) + self.__retired = _SortedSetTable( + _status_template.format(app=_app, category="retired") + ) + self.__expired = _SortedSetTable( + _status_template.format(app=_app, category="expired") + ) + self.__pending = _SortedSetTable( + _status_template.format(app=_app, category="pending") + ) + self.__building = _SortedSetTable( + _status_template.format(app=_app, category="building") + ) + self.__range = type( + "rangefuncs", + (object,), + { + "age": partial(_range, self.__client, self.__age), + "retired": partial(_range, self.__client, self.__retired), + "expired": partial(_range, self.__client, self.__expired), + "pending": partial(_range, self.__client, self.__pending), + "building": partial(_range, self.__client, self.__building), + }, + )() + + def __contains__(self, key): + """ + Returns True if a config for the given key exists. + + @type key: DeviceKey + @rtype: Boolean + """ + return self.__config.exists(self.__client, key) + + def __iter__(self): + """ + Returns an iterable producing the keys for all existing configs. + + @rtype: Iterator[DeviceKey] + """ + return iter( + DeviceKey(**self.__config.parse_rawkey(raw)) + for raw in self.__config.scan(self.__client, DeviceQuery()) + ) + + def search(self, query=None): + """ + Returns the configuration keys matching the search criteria. 
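To make the key layout concrete: with the templates above, a single stored config for device "router1" on monitor "localhost" occupies keys shaped like the lines below (values illustrative), and search() walks the config keys with a glob built from a DeviceQuery:

    #   configcache:device:uid:router1                        -> "/zport/dmd/Devices/..."
    #   configcache:device:config:<service>:localhost:router1 -> serialized DeviceProxy
    #   configcache:device:age:<service>:localhost            -> sorted set {router1: <ms timestamp>}

    from Products.ZenCollector.configcache.cache import DeviceQuery
    from Products.ZenCollector.configcache.cache.storage import DeviceConfigStore

    store = DeviceConfigStore.make()  # connects via getRedisClient(url=getRedisUrl())
    for key in store.search(DeviceQuery(monitor="localhost")):
        print("%s %s %s" % (key.service, key.device, store.get_updated(key)))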
+ + @type query: DeviceQuery + @rtype: Iterator[DeviceKey] + @raises TypeError: Unsupported value given for a field + @raises AttributeError: Unknown field + """ + if query is None: + query = DeviceQuery() + if not isinstance(query, DeviceQuery): + raise TypeError("'{!r} is not a DeviceQuery".format(query)) + return self._query(**attr.asdict(query)) + + def add(self, record): + """ + @type record: DeviceRecord + """ + self._add(record, self._delete_statuses) + + def put_config(self, record): + """ + Updates the config without changing its status. + + @type record: DeviceRecord + """ + self._add(record) + + def _add(self, record, statushandler=lambda *args: None): + svc, mon, dvc, uid, updated, config = _from_record(record) + orphaned_keys = tuple( + key + for key in self._query(service=svc, device=dvc) + if key.monitor != mon + ) + watch_keys = self._get_watch_keys(orphaned_keys + (record.key,)) + stored_uid = self.__uids.get(self.__client, _UIDKey(dvc)) + + def _add_impl(pipe): + pipe.multi() + # Remove configs for this device that exist with a different + # monitor. + # Note: configs produced by different configuration services + # may exist simultaneously. + for key in orphaned_keys: + self.__config.delete(pipe, key) + self.__age.delete(pipe, key, key.device) + self._delete_statuses(pipe, key) + if stored_uid != uid: + self.__uids.set(pipe, _UIDKey(record.key.device), uid) + self.__config.set(pipe, record.key, config) + self.__age.add(pipe, record.key, dvc, updated) + statushandler(pipe, record.key) + + self.__client.transaction(_add_impl, *watch_keys) + + def get_uid(self, deviceId): + """ + Return the ZODB UID (path) for the given device. + + @type deviceId: str + @rtype: str | None + """ + return self.__uids.get(self.__client, _UIDKey(deviceId)) + + def get_uids(self, *deviceids): + """ + Return the ZODB UID (path) for each of the given devices. + + The return value is an iterator producing two element tuples: + + (, ) + + The second element of the tuple has the value None if no UID + exists for the requested device. + + @type deviceids: List[str] + @rtype: Iterator[Tuple[str, str|None]] + """ + keys = tuple(_UIDKey(dvc) for dvc in deviceids) + return ( + (self.__uids.parse_rawkey(raw)["device"], uid) + for raw, uid in self.__uids.mget(self.__client, *keys) + ) + + def get_updated(self, key): + """ + Return the timestamp of when the config was built. + + @type key: DeviceKey + @rtype: float + """ + return _to_ts(self.__age.score(self.__client, key, key.device)) + + def query_updated(self, query=None): + """ + Return the last update timestamp of every configuration selected + by the query. + + @type query: DeviceQuery + @rtype: Iterable[Tuple[DeviceKey, float]] + """ + if query is None: + query = DeviceQuery() + predicate = self._get_device_predicate(query.device) + return ( + (key, ts) + for key, ts in self._get_metadata(self.__age, query) + if predicate(key.device) + ) + + def get(self, key, default=None): + """ + @type key: DeviceKey + @rtype: DeviceRecord + """ + with self.__client.pipeline() as pipe: + self.__config.get(pipe, key) + self.__age.score(pipe, key, key.device) + self.__uids.get(pipe, _UIDKey(key.device)) + conf, score, uid = pipe.execute() + if conf is None: + return default + score = 0 if score < 0 else score + return _to_record( + key.service, key.monitor, key.device, uid, score, conf + ) + + def remove(self, *keys): + """ + Delete the configurations identified by `keys`. 
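Every mutation in this store goes through redis-py's optimistic-locking helper: client.transaction(func, *watch_keys) WATCHes the keys, invokes func(pipe), and retries automatically if a watched key changes before EXEC. A stripped-down sketch of the same pattern with hypothetical key names:

    import redis

    client = redis.StrictRedis()  # the store obtains its client via getRedisClient()

    def _move_to_building(pipe):
        # WATCH is already in effect for the keys passed to transaction()
        pipe.multi()
        pipe.zrem("configcache:device:pending:Svc:localhost", "router1")
        pipe.zadd(
            "configcache:device:building:Svc:localhost",
            {"router1": 1700000000000.0},
        )

    client.transaction(
        _move_to_building,
        "configcache:device:pending:Svc:localhost",
        "configcache:device:building:Svc:localhost",
    )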
+ + @type keys: Sequence[DeviceKey] + """ + with self.__client.pipeline() as pipe: + for key in keys: + self.__config.delete(pipe, key) + self.__age.delete(pipe, key, key.device) + self._delete_statuses(pipe, key) + pipe.execute() + + given = {key.device for key in keys} + remaining = { + key.device + for key in chain.from_iterable( + self._query(device=device) for device in given + ) + } + deleted = given - remaining + if deleted: + with self.__client.pipeline() as pipe: + for device in deleted: + self.__uids.delete(pipe, _UIDKey(device)) + pipe.execute() + + def _query(self, service="*", monitor="*", device="*"): + return ( + DeviceKey(**self.__config.parse_rawkey(raw)) + for raw in self.__config.scan( + self.__client, + DeviceQuery(service=service, monitor=monitor, device=device), + ) + ) + + def clear_status(self, *keys): + """ + Removes retired, expired, pending, and building statuses. + + If a config is present, the status becomes current. If no config + is present, then there is no status. + + @type keys: Sequence[DeviceKey] + """ + if len(keys) == 0: + return + + def clear_impl(pipe): + pipe.multi() + for key in keys: + self._delete_statuses(pipe, key) + + watch_keys = self._get_watch_keys(keys) + self.__client.transaction(clear_impl, *watch_keys) + + def _delete_statuses(self, pipe, key): + self.__retired.delete(pipe, key, key.device) + self.__expired.delete(pipe, key, key.device) + self.__pending.delete(pipe, key, key.device) + self.__building.delete(pipe, key, key.device) + + def set_retired(self, *pairs): + """ + Marks the indicated configuration(s) as retired. + + @type keys: Sequence[(DeviceKey, float)] + """ + + def _impl(rows, pipe): + pipe.multi() + for key, ts in rows: + score = _to_score(ts) + self.__retired.add(pipe, key, key.device, score) + self.__expired.delete(pipe, key, key.device) + self.__pending.delete(pipe, key, key.device) + self.__building.delete(pipe, key, key.device) + + self._set_status(pairs, _impl) + + def set_expired(self, *pairs): + """ + Marks the indicated configuration(s) as expired. + + @type keys: Sequence[(DeviceKey, float)] + """ + + def _impl(rows, pipe): + pipe.multi() + for key, ts in rows: + score = _to_score(ts) + self.__retired.delete(pipe, key, key.device) + self.__expired.add(pipe, key, key.device, score) + self.__pending.delete(pipe, key, key.device) + self.__building.delete(pipe, key, key.device) + + self._set_status(pairs, _impl) + + def set_pending(self, *pairs): + """ + Marks configuration(s) as waiting for a new configuration. + + @type pending: Sequence[(DeviceKey, float)] + """ + + def _impl(rows, pipe): + pipe.multi() + for key, ts in rows: + score = _to_score(ts) + self.__retired.delete(pipe, key, key.device) + self.__expired.delete(pipe, key, key.device) + self.__pending.add(pipe, key, key.device, score) + self.__building.delete(pipe, key, key.device) + + self._set_status(pairs, _impl) + + def set_building(self, *pairs): + """ + Marks configuration(s) as building a new configuration. 
+ + @type pairs: Sequence[(DeviceKey, float)] + """ + + def _impl(rows, pipe): + pipe.multi() + for key, ts in rows: + score = _to_score(ts) + self.__retired.delete(pipe, key, key.device) + self.__expired.delete(pipe, key, key.device) + self.__pending.delete(pipe, key, key.device) + self.__building.add(pipe, key, key.device, score) + + self._set_status(pairs, _impl) + + def _set_status(self, pairs, fn): + if len(pairs) == 0: + return + + watch_keys = self._get_watch_keys(key for key, _ in pairs) + # rows = tuple( + # (key.service, key.monitor, key.device, ts) for key, ts in pairs + # ) + + callback = partial(fn, pairs) + self.__client.transaction(callback, *watch_keys) + + def _get_watch_keys(self, keys): + return set( + chain.from_iterable( + ( + self.__age.to_rawkey(key), + self.__retired.to_rawkey(key), + self.__expired.to_rawkey(key), + self.__pending.to_rawkey(key), + self.__building.to_rawkey(key), + ) + for key in keys + ) + ) + + def get_status(self, key): + """ + Returns the current status of the config identified by `key`. + + @type key: DeviceKey + @rtype: ConfigStatus | None + """ + scores = self._get_scores(key) + if not any(scores): + return None + return self._get_status_from_scores(scores, key) + + def query_statuses(self, query=None): + """ + Return all status objects matching the query. + + @type query: DeviceQuery + @rtype: Iterable[ConfigStatus] + """ + if query is None: + query = DeviceQuery() + keys = set() + tables = ( + (self.__expired, ConfigStatus.Expired), + (self.__retired, ConfigStatus.Retired), + (self.__pending, ConfigStatus.Pending), + (self.__building, ConfigStatus.Building), + ) + predicate = self._get_device_predicate(query.device) + + for table, cls in tables: + for key, ts in self._get_metadata(table, query): + if predicate(key.device): + keys.add(key) + yield cls(key, ts) + for key, ts in self._get_metadata(self.__age, query): + # Skip age (aka 'current') data for keys that already have + # some other status. + if key in keys: + continue + if predicate(key.device): + yield ConfigStatus.Current(key, ts) + + def _get_device_predicate(self, spec): + if spec == "*": + return lambda _: True + elif "*" in spec: + expr = spec.replace("*", ".*") + regex = re.compile(expr) + return lambda value: regex.match(value) is not None + else: + return lambda value: value == spec + + def _get_metadata(self, table, query): + return ( + ( + DeviceKey(device=device, **table.parse_rawkey(raw)), + _to_ts(score), + ) + for raw, device, score in table.scan(self.__client, query) + ) + + def _get_status_from_scores(self, scores, key): + age, retired, expired, pending, building = scores + if building is not None: + return ConfigStatus.Building(key, _to_ts(building)) + elif pending is not None: + return ConfigStatus.Pending(key, _to_ts(pending)) + elif expired is not None: + return ConfigStatus.Expired(key, _to_ts(expired)) + elif retired is not None: + return ConfigStatus.Retired(key, _to_ts(retired)) + elif age is not None: + return ConfigStatus.Current(key, _to_ts(age)) + + def get_building(self, service="*", monitor="*"): + """ + Return an iterator producing ConfigStatus.Building objects. + + @rtype: Iterable[ConfigStatus.Building] + """ + query = DeviceQuery(service=service, monitor=monitor) + return ( + ConfigStatus.Building(key, ts) + for key, ts in self.__range.building(query) + ) + + def get_pending(self, service="*", monitor="*"): + """ + Return an iterator producing ConfigStatus.Pending objects. 
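A short usage sketch of the status API: _get_status_from_scores resolves overlaps in the order building, pending, expired, retired, current, and query_statuses yields exactly one status object per key:

    from Products.ZenCollector.configcache.cache import ConfigStatus, DeviceQuery
    from Products.ZenCollector.configcache.cache.storage import DeviceConfigStore

    store = DeviceConfigStore.make()
    for status in store.query_statuses(DeviceQuery(monitor="localhost")):
        if isinstance(status, ConfigStatus.Building):
            print("%s building since %f" % (status.key.device, status.started))
        elif isinstance(status, ConfigStatus.Expired):
            print("%s expired at %f" % (status.key.device, status.expired))
        elif isinstance(status, ConfigStatus.Current):
            print("%s current as of %f" % (status.key.device, status.updated))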
+ + @rtype: Iterable[ConfigStatus.Pending] + """ + query = DeviceQuery(service=service, monitor=monitor) + return ( + ConfigStatus.Pending(key, ts) + for key, ts in self.__range.pending(query) + ) + + def get_expired(self, service="*", monitor="*"): + """ + Return an iterator producing ConfigStatus.Expired objects. + + @rtype: Iterable[ConfigStatus.Expired] + """ + query = DeviceQuery(service=service, monitor=monitor) + return ( + ConfigStatus.Expired(key, ts) + for key, ts in self.__range.expired(query) + ) + + def get_retired(self, service="*", monitor="*"): + """ + Return an iterator producing ConfigStatus.Retired objects. + + @rtype: Iterable[ConfigStatus.Retired] + """ + query = DeviceQuery(service=service, monitor=monitor) + return ( + ConfigStatus.Retired(key, ts) + for key, ts in self.__range.retired(query) + ) + + def get_older(self, maxtimestamp, service="*", monitor="*"): + """ + Returns an iterator producing ConfigStatus.Current objects + where current timestamp <= `maxtimestamp`. + + @rtype: Iterable[ConfigStatus.Current] + """ + query = DeviceQuery(service=service, monitor=monitor) + # NOTE: 'older' means timestamps > 0 and <= `maxtimestamp`. + selection = tuple( + (key, age) + for key, age in self.__range.age( + query, minv="(0", maxv=_to_score(maxtimestamp) + ) + ) + for key, age in selection: + scores = self._get_scores(key)[1:] + if any(score is not None for score in scores): + continue + yield ConfigStatus.Current(key, age) + + def get_newer(self, mintimestamp, service="*", monitor="*"): + """ + Returns an iterator producing ConfigStatus.Current objects + where current timestamp > `mintimestamp`. + + @rtype: Iterable[ConfigStatus.Current] + """ + query = DeviceQuery(service=service, monitor=monitor) + # NOTE: 'newer' means timestamps to `maxtimestamp`. + selection = tuple( + (key, age) + for key, age in self.__range.age( + query, minv="(%s" % (_to_score(mintimestamp),) + ) + ) + for key, age in selection: + scores = self._get_scores(key)[1:] + if any(score is not None for score in scores): + continue + yield ConfigStatus.Current(key, age) + + def _get_scores(self, key): + service, monitor, device = attr.astuple(key) + with self.__client.pipeline() as pipe: + self.__age.score(pipe, key, key.device) + self.__retired.score(pipe, key, key.device) + self.__expired.score(pipe, key, key.device) + self.__pending.score(pipe, key, key.device) + self.__building.score(pipe, key, key.device) + return pipe.execute() + + +def _range(client, table, query, minv=None, maxv=None): + pattern = table.to_rawkey(query) + return ( + (DeviceKey(device=device, **table.parse_rawkey(raw)), _to_ts(score)) + for raw, device, score in table.range( + client, pattern, minscore=minv, maxscore=maxv + ) + ) + + +def _deserialize(data): + # Python2's `unicode` built-in won't accept a unicode string when the + # `encoding` parameter is given. Twisted's `unjelly` function assumes + # that a Unicode value is an utf-8-encoded non-unicode string. However, + # by default, all strings from a JSON loader are Unicode strings, so + # Twisted's `unjelly` function fails on the unicode value. + # + # The fix is add a hook to ensure that all strings are converted into + # binary (non-unicode) strings. However, Twisted's jelly format is + # s-expressions, which are basically nested lists, and there's no JSON + # hook for lists. So, wrap the data into a JSON-object (a dict) and + # use a function to customize the decoding. 
+ try: + data = zlib.decompress(data) + except zlib.error: + pass + data = '{{"config":{}}}'.format(data) + return unjelly(json.loads(data, object_hook=_decode_config)) + + +def _decode_config(data): + return _decode_list(data.get("config")) + + +def _decode_list(data): + return [_decode_item(item) for item in data] + + +def _decode_item(item): + if isinstance(item, six.text_type): + return item.encode("utf-8") + elif isinstance(item, list): + return _decode_list(item) + else: + return item + + +def _serialize(config): + return zlib.compress(json.dumps(jelly(config))) + + +def _to_score(ts): + return ts * 1000.0 + + +def _to_ts(score): + return score / 1000.0 + + +def _to_record(svc, mon, dvc, uid, updated, config): + key = DeviceKey(svc, mon, dvc) + updated = _to_ts(updated) + config = _deserialize(config) + return DeviceRecord(key, uid, updated, config) + + +def _from_record(record): + return ( + record.service, + record.monitor, + record.device, + record.uid, + _to_score(record.updated), + _serialize(record.config), + ) + + +@attr.s(frozen=True, slots=True) +class _UIDKey(object): + device = attr.ib(converter=str, validator=instance_of(str)) + + +class _CompositeTable(object): + """ + Composite of a table type and KeyManager. + """ + + def __init__(self, template, tabletype, keytype, querytype): + self.__km = KeyConverter( + template, keytype=keytype, querytype=querytype + ) + self.__table = tabletype() + self.__methods = { + t[0]: t[1] + for t in inspect.getmembers(self.__table) + if not t[0].startswith("_") and isinstance(t[1], types.MethodType) + } + + @property + def keys(self): + return self.__km + + def mget(self, client, *keys): + # The mget method accepts multiple keys and so must be handled + # differently than _callmethod, which accepts only one key. + rawkeys = tuple(self.__km.to_raw(k) for k in keys) + return self.__methods["mget"](client, *rawkeys) + + def to_rawkey(self, key): + return self.__km.to_raw(key) + + def parse_rawkey(self, raw): + return self.__km.parse(raw) + + def __getattr__(self, name): + method = self.__methods.get(name) + if method is None: + raise AttributeError("Attribute not found '{}'".format(name)) + return partial(self._callmethod, method) + + def _callmethod(self, method, client, key, *args, **kw): + rawkey = self.__km.to_raw(key) + return method(client, rawkey, *args, **kw) + + +class _SortedSetTable(_CompositeTable): + def __init__(self, template): + super(_SortedSetTable, self).__init__( + template, SortedSet, DeviceKey, DeviceQuery + ) + + +class _StringTable(_CompositeTable): + def __init__(self, template, keytype=DeviceKey): + super(_StringTable, self).__init__( + template, String, keytype, DeviceQuery + ) diff --git a/Products/ZenCollector/configcache/cache/storage/oidmap.py b/Products/ZenCollector/configcache/cache/storage/oidmap.py new file mode 100644 index 0000000000..50743e08e2 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/storage/oidmap.py @@ -0,0 +1,250 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +# Key structure +# ============= +# configcache:oidmap:config +# configcache:oidmap:state { +# "checksum": ..., +# "created": ..., +# "status": ..., +# "effective": ... +# } +# +# * config - the oid map. 
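A round-trip sketch of the device-config serialization that the comment above _deserialize describes, using a plain structure in place of a DeviceProxy; _decode_config is the hook defined in that module:

    import json
    import zlib

    from twisted.spread.jelly import jelly, unjelly

    payload = ["snmp", {"community": "public"}]

    blob = zlib.compress(json.dumps(jelly(payload)))  # what _serialize() stores

    data = zlib.decompress(blob)
    wrapped = '{{"config":{}}}'.format(data)          # give the list a dict hook
    restored = unjelly(json.loads(wrapped, object_hook=_decode_config))
    # restored equals payload, with every string back as a byte string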
+# * hash - the hash of the current oid map +# * created - timestamp of when the oid map was created +# * status - identifies the oid map's status (expired, pending, building) +# * effective - timestamp of when the status took effect +# +# No value for "status" means the status of the oidmap is Current. +# +# "status" is "expired", "pending", or "building" +# + +from __future__ import absolute_import, print_function, division + +import ast +import json +import logging +import zlib + +from twisted.spread.jelly import jelly, unjelly +from zope.component.factory import Factory + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from ..model import CacheKey, ConfigStatus, OidMapRecord +from ..table import Hash, String + +_app = "configcache" +log = logging.getLogger("zen.configcache.storage.oidmap") + + +class OidMapStoreFactory(Factory): + """ + IFactory implementation for OidMapStore objects. + """ + + def __init__(self): + super(OidMapStoreFactory, self).__init__( + OidMapStore, + "OidMapStore", + "OID Map Cache Storage", + ) + + +_template_oidmap = "{app}:oidmap:config" +_template_state = "{app}:oidmap:state" + +_status_map = { + cls.__name__: cls + for cls in ( + ConfigStatus.Expired, + ConfigStatus.Pending, + ConfigStatus.Building, + ) +} + + +class _FieldNames(object): + checksum = "checksum" + created = "created" + status = "status" + effective = "effective" + + +class OidMapStore(object): + """ + An OID map store. + """ + + @classmethod + def make(cls): + """Create and return a OidMapStore object.""" + client = getRedisClient(url=getRedisUrl()) + return cls(client) + + def __init__(self, client): + """Initialize a OidMapStore instance.""" + self.__client = client + self.__oidmap_key = _template_oidmap.format(app=_app) + self.__oids = String() + self.__state_key = _template_state.format(app=_app) + self.__state = Hash() + + def __nonzero__(self): + return self.__client.exists(self.__oidmap_key) + + def remove(self): + with self.__client.pipeline() as pipe: + self.__oids.delete(pipe, self.__oidmap_key) + self.__state.delete(pipe, self.__state_key) + pipe.execute() + + def get_checksum(self): + return self.__state.getfield( + self.__client, self.__state_key, _FieldNames.checksum + ) + + def get_created(self): + created = self.__state.getfield( + self.__client, self.__state_key, _FieldNames.created + ) + if created is not None: + created = float(created) + return created + + def get_status(self): + state = self.__state.get(self.__client, self.__state_key) + if not state: + return None + status_name = state.get(_FieldNames.status) + if status_name is None: + return ConfigStatus.Current( + CacheKey(), float(state[_FieldNames.created]) + ) + status_cls = _status_map.get(status_name) + if status_cls is None: + raise RuntimeError( + "invalid status for oidmap: {}".format(status_name) + ) + return status_cls(CacheKey(), float(state[_FieldNames.effective])) + + def get(self, default=None): + with self.__client.pipeline() as pipe: + self.__oids.get(pipe, self.__oidmap_key) + self.__state.get(pipe, self.__state_key) + oids, state = pipe.execute() + if oids is None: + return default + return _to_record( + state.get(_FieldNames.created), + state.get(_FieldNames.checksum), + oids, + ) + + def add(self, record): + """ + Adds or updates the OidMap and changes the status to Current. + + @type record: OidMapRecord + """ + self._add(record, self._delete_status) + + def put(self, record): + """ + Updates the OidMap without changing its status. 
+ + @type record: OidMapRecord + """ + self._add(record) + + def _add(self, record, statushandler=lambda *args, **kw: None): + created, checksum, oidmap = _from_record(record) + watch_keys = (self.__oidmap_key, self.__state_key) + + def _add_impl(pipe): + pipe.multi() + self.__oids.set(pipe, self.__oidmap_key, oidmap) + self.__state.set( + pipe, + self.__state_key, + {_FieldNames.checksum: checksum, _FieldNames.created: created}, + ) + statushandler(pipe) + + self.__client.transaction(_add_impl, *watch_keys) + + def _delete_status(self, client): + self.__state.deletefields( + client, self.__state_key, _FieldNames.status, _FieldNames.effective + ) + + def set_expired(self, timestamp): + """ + Marks the indicated oidmap(s) as expired. + + @type timestamp: float + """ + self._set_status(timestamp, ConfigStatus.Expired.__name__) + + def set_pending(self, timestamp): + """ + Marks configuration(s) as waiting for a new configuration. + + @type pending: Sequence[(OidMapKey, float)] + """ + self._set_status(timestamp, ConfigStatus.Pending.__name__) + + def set_building(self, timestamp): + """ + Marks configuration(s) as building a new configuration. + + @type pairs: Sequence[(OidMapKey, float)] + """ + self._set_status(timestamp, ConfigStatus.Building.__name__) + + def _set_status(self, timestamp, status_name): + watch_keys = (self.__state_key,) + + def _impl(pipe): + pipe.multi() + self.__state.set( + pipe, + self.__state_key, + { + _FieldNames.status: status_name, + _FieldNames.effective: timestamp, + }, + ) + + self.__client.transaction(_impl, *watch_keys) + + +def _deserialize(data): + return unjelly(ast.literal_eval(zlib.decompress(data))) + + +def _serialize(oidmap): + return zlib.compress(json.dumps(jelly(oidmap))) + + +def _to_record(created, checksum, oidmap): + created = float(created) + oidmap = _deserialize(oidmap) + return OidMapRecord(created, checksum, oidmap) + + +def _from_record(record): + return ( + record.created, + record.checksum, + _serialize(record.oidmap), + ) diff --git a/Products/ZenCollector/configcache/cache/table/__init__.py b/Products/ZenCollector/configcache/cache/table/__init__.py new file mode 100644 index 0000000000..ac04580f0d --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/__init__.py @@ -0,0 +1,19 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from .hash import Hash +from .sortedset import SortedSet +from .string import String + + +__all__ = ( + "Hash", + "SortedSet", + "String", +) diff --git a/Products/ZenCollector/configcache/cache/table/config.py b/Products/ZenCollector/configcache/cache/table/config.py new file mode 100644 index 0000000000..a79944a401 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/config.py @@ -0,0 +1,66 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
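A lifecycle sketch for the OID-map store (the checksum and OID values are illustrative):

    import time

    from Products.ZenCollector.configcache.cache import OidMapRecord
    from Products.ZenCollector.configcache.cache.storage import OidMapStore

    store = OidMapStore.make()
    record = OidMapRecord.make(
        time.time(), "d41d8cd9", {".1.3.6.1.2.1.1.5.0": "sysName"}
    )
    store.add(record)               # writes config + state; status becomes Current
    store.set_expired(time.time())  # state hash gains status/effective fields
    status = store.get_status()     # -> a ConfigStatus.Expired instance
    latest = store.get()            # -> OidMapRecord(created, checksum, oidmap)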
+# +############################################################################## + + +class _StringTable(object): + """ """ + + def __init__(self, template, scan_page_size=1000): + self.__template = template + self.__scan_count = 1000 + + def make_key(self, **parts): + return self.__template.format(**parts) + + def exists(self, client, **parts): + return client.exists(self.make_key(**parts)) + + def scan(self, client, **parts): + pattern = self.make_key(**parts) + result = client.scan_iter(match=pattern, count=self.__scan_count) + return (tuple(key.rsplit(":", len(parts))[1:]) for key in result) + + def get(self, client, **parts): + key = self.make_key(**parts) + return client.get(key) + + def set(self, client, data, **parts): + key = self.make_key(**parts) + client.set(key, data) + + def delete(self, client, **parts): + key = self.make_key(**parts) + client.delete(key) + + +class OidMapConfigTable(_StringTable): + """ + Manages OidMap data. + """ + + def __init__(self, app, scan_page_size=1000): + super(OidMapConfigTable, self).__init__( + "{app}:oidmap:config:{{service}}:{{monitor}}".format(app=app), + scan_page_size=scan_page_size + ) + + +class DeviceConfigTable(_StringTable): + """ + Manages device configuration data for a specific configuration service. + """ + + def __init__(self, app, scan_page_size=1000): + """Initialize a DeviceConfigTable instance.""" + super(DeviceConfigTable, self).__init__( + "{app}:device:config:{{service}}:{{monitor}}:{{device}}".format( + app=app + ), + scan_page_size=scan_page_size + ) diff --git a/Products/ZenCollector/configcache/cache/table/hash.py b/Products/ZenCollector/configcache/cache/table/hash.py new file mode 100644 index 0000000000..e7fe220ed4 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/hash.py @@ -0,0 +1,69 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + + +class Hash(object): + """A key/value store for hash data, e.g. nested key/value data.""" + + def __init__(self, scan_page_size=1000): + self.__scan_count = scan_page_size + + def exists(self, client, key, field=None): + """ + If `fields` is given, returns True if the named field is present + in the key. If `fields` is None, returns True if the key exists. + + @type client: RedisClient + @type field: str | None + @type atoms: Map[str, str] + @rtype: Boolean + """ + if field: + return client.hexists(key, field) + return bool(client.exists(key)) + + def scan(self, client, pattern): + """ + Returns an iterable producing tuples containing the atoms of + keys matching `atoms`. + """ + result = client.scan_iter(match=pattern, count=self.__scan_count) + return (key for key in result) + + def get(self, client, key): + """ + Returns the mapping stored in the key specified by `atoms`. + """ + result = client.hgetall(key) + return result if result else None + + def getfield(self, client, key, field): + """ + Returns the value of the `field` store in the key given by `atoms`. + """ + return client.hget(key, field) + + def set(self, client, key, mapping): + """ + Use `mapping` to set or replace fields found in the key. 
+ + @type mapping: Mapping[str, Union[bytes, str, int, float]] + """ + client.hset(key, mapping=mapping) + + def delete(self, client, key): + client.delete(key) + + def deletefields(self, client, key, *fields): + """ + Delete the specified field from the key. + """ + client.hdel(key, *fields) diff --git a/Products/ZenCollector/configcache/cache/table/metadata.py b/Products/ZenCollector/configcache/cache/table/metadata.py new file mode 100644 index 0000000000..222186b366 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/metadata.py @@ -0,0 +1,114 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + + +class ConfigMetadataTable(object): + """ + Manages the mapping of device configurations to monitors. + + Configuration IDs are mapped to service ID/monitor ID pairs. + + A Service ID/monitor ID pair are used as a key to retrieve the + Configuration IDs mapped to the pair. + """ + + def __init__(self, app, category): + """Initialize a ConfigMetadataStore instance.""" + self.__template = ( + "{app}:device:{category}:{{service}}:{{monitor}}".format( + app=app, category=category + ) + ) + self.__scan_count = 1000 + + def make_key(self, service, monitor): + return self.__template.format(service=service, monitor=monitor) + + def get_pairs(self, client, service="*", monitor="*"): + pattern = self.make_key(service, monitor) + return ( + key.rsplit(":", 2)[1:] + for key in client.scan_iter(match=pattern, count=self.__scan_count) + ) + + def scan(self, client, pairs): + """ + Return an iterable of tuples of (service, monitor, device, score). + + @type client: redis client + @type pairs: Iterable[Tuple[str, str]] + @rtype Iterator[Tuple[str, str, str, float]] + """ + return ( + (service, monitor, dvc, score) + for service, monitor in pairs + for dvc, score in client.zscan_iter( + self.make_key(service, monitor), count=self.__scan_count + ) + ) + + def range(self, client, pairs, maxscore=None, minscore=None): + """ + Return an iterable of tuples of (service, monitor, device, score). + + @type client: redis client + @type pairs: Iterable[Tuple[str, str]] + @type minscore: Union[float, None] + @type maxscore: Union[float, None] + @rtype Iterator[Tuple[str, str, str, float]] + """ + maxv = maxscore if maxscore is not None else "+inf" + minv = minscore if minscore is not None else "-inf" + return ( + (service, monitor, device, score) + for service, monitor in pairs + for device, score in client.zrangebyscore( + self.make_key(service, monitor), minv, maxv, withscores=True + ) + ) + + def exists(self, client, service, monitor, device): + """Return True if a score for the key and device exists. + + @type client: RedisClient + @type service: str + @type monitor: str + @type device: str + """ + key = self.make_key(service, monitor) + return client.zscore(key, device) is not None + + def add(self, client, service, monitor, device, score): + """ + Add a (device, score) -> (monitor, serviceid) mapping. + This method will replace any existing mapping for device. 
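# Editor's note: a short usage sketch for the Hash table above (illustrative
# only; the key name, field names, and client setup are assumptions, and
# Redis hands hash values back as strings).
import redis

from Products.ZenCollector.configcache.cache.table import Hash

client = redis.StrictRedis()  # assumed local Redis
table = Hash()
key = "configcache:device:state:Snmp:localhost:device1"  # hypothetical key

table.set(client, key, {"status": "Current", "updated": 1700000000.0})
table.exists(client, key, "status")          # -> True
table.getfield(client, key, "updated")       # -> "1700000000.0" (a string)
table.deletefields(client, key, "updated")   # remove a single field
table.delete(client, key)                    # drop the whole hash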
+ + @type client: RedisClient + @type service: str + @type monitor: str + @type device: str + @type score: float + """ + key = self.make_key(service, monitor) + client.zadd(key, {device: score}) + + def score(self, client, service, monitor, device): + """ + Returns the timestamp associated with the device ID. + Returns None of the device ID is not found. + """ + key = self.make_key(service, monitor) + return client.zscore(key, device) + + def delete(self, client, service, monitor, device): + """ + Removes a device from a (service, monitor) key. + """ + key = self.make_key(service, monitor) + client.zrem(key, device) diff --git a/Products/ZenCollector/configcache/cache/table/sortedset.py b/Products/ZenCollector/configcache/cache/table/sortedset.py new file mode 100644 index 0000000000..2a466d1cd9 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/sortedset.py @@ -0,0 +1,101 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + + +class SortedSet(object): + """ + Manages data stored as sorted sets. + + For each key, multiple values can be stored, each with its own score. + The score determines the sort order of the values in the key. + """ + + def __init__(self, scan_page_size=1000): + self.__scan_count = scan_page_size + + def scan(self, client, pattern): + """ + Return an iterator of tuples of (key, value, score). + + @type client: redis client + @type pattern: str + @rtype Iterator[Tuple] + """ + return ( + (key, value, score) + for key in self._keys(client, pattern) + for value, score in client.zscan_iter(key, count=self.__scan_count) + ) + + def range(self, client, pattern, maxscore=None, minscore=None): + """ + Return an iterable of tuples of (key, value, score). + + @type client: redis client + @type pattern: str + @type minscore: Union[float, None] + @type maxscore: Union[float, None] + @rtype Iterator[Tuple[*str, float]] + """ + maxv = maxscore if maxscore is not None else "+inf" + minv = minscore if minscore is not None else "-inf" + return ( + (key, value, score) + for key in self._keys(client, pattern) + for value, score in client.zrangebyscore( + key, minv, maxv, withscores=True + ) + ) + + def exists(self, client, key, value): + """ + Return True if a score for `value` exists in `key`. + + @type client: RedisClient + @type key: str + @type value: str + @rtype: Boolean + """ + return client.zscore(key, value) is not None + + def add(self, client, key, value, score): + """ + Sets a `score` for the `value` in `key`. + + @type client: RedisClient + @type key: str + @type value: str + @type score: float + """ + client.zadd(key, {value: score}) + + def score(self, client, key, value): + """ + Returns the score associated with `value` from `key`. + Returns None if no score is found. + + @type client: RedisClient + @type key: str + @type value: str + """ + return client.zscore(key, value) + + def delete(self, client, key, value): + """ + Removes a `value` from `key`. 
+ """ + client.zrem(key, value) + + def _keys(self, client, pattern): + return ( + key + for key in client.scan_iter(match=pattern, count=self.__scan_count) + ) diff --git a/Products/ZenCollector/configcache/cache/table/string.py b/Products/ZenCollector/configcache/cache/table/string.py new file mode 100644 index 0000000000..5b49c32dd9 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/string.py @@ -0,0 +1,79 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from ..utils import batched + + +class String(object): + """A key/value store for string data.""" + + def __init__(self, scan_page_size=1000, mget_page_size=100): + self.__scan_count = scan_page_size + self.__mget_page = mget_page_size + + def exists(self, client, key): + """ + Returns True if the `key` is present in Redis. + + @rtype: boolean + """ + return bool(client.exists(key)) + + def scan(self, client, pattern): + """ + Returns an iterator producing keys that match `pattern`. + + @type pattern: str + @rtype: Iterator[str] + """ + result = client.scan_iter(match=pattern, count=self.__scan_count) + return (key for key in result) + + def mget(self, client, *keys): + """ + Returns an iterator producing key/value pairs for each key in `keys`. + + @type keys: Sequence[str] + @rtype: Iterator[Tuple[str, str | None]] + """ + return ( + (key, value) + for batch in batched(keys, self.__mget_page) + for key, value in zip(batch, client.mget(*batch)) + ) + + def get(self, client, key): + """ + Returns the value corresponding to `key`. + Returns None if `key` is not found. + + @type key: str + @rtype: str | None + """ + return client.get(key) + + def set(self, client, key, value): + """ + Sets the `value` for `key`. + + @type key: str + @type value: str | int | float + @rtype: str | None + """ + client.set(key, value) + + def delete(self, client, key): + """ + Removes the value associated with `key`. + + @type key: str + """ + client.delete(key) diff --git a/Products/ZenCollector/configcache/cache/table/tests/__init__.py b/Products/ZenCollector/configcache/cache/table/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenCollector/configcache/cache/table/tests/test_hash.py b/Products/ZenCollector/configcache/cache/table/tests/test_hash.py new file mode 100644 index 0000000000..96be7fa51b --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/tests/test_hash.py @@ -0,0 +1,94 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from unittest import TestCase + +from Products.Jobber.tests.utils import RedisLayer # , subTest + +from ..hash import Hash + + +class TestHashTable(TestCase): + """Test the Hash table class.""" + + layer = RedisLayer + + def setUp(t): + t.key = "foo:bar" + t.table = Hash() + + def tearDown(t): + del t.table + + def test_no_data(t): + field = "f1" + t.assertFalse(t.table.exists(t.layer.redis, t.key)) + t.assertFalse(t.table.exists(t.layer.redis, t.key, field)) + t.assertEqual(0, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertIsNone(t.table.get(t.layer.redis, t.key)) + t.assertIsNone(t.table.getfield(t.layer.redis, t.key, field)) + t.assertIsNone(t.table.delete(t.layer.redis, t.key)) + + def test_add_data(t): + mapping = {"f1": "cookie", "f2": 2343.2} + + expected_get = {"f1": "cookie", "f2": "2343.2"} + expected_scan = (t.key,) + + t.table.set(t.layer.redis, t.key, mapping) + + scan = tuple(t.table.scan(t.layer.redis, t.key)) + get = t.table.get(t.layer.redis, t.key) + f1 = t.table.getfield(t.layer.redis, t.key, "f1") + + t.assertTrue(t.table.exists(t.layer.redis, t.key)) + t.assertTrue(t.table.exists(t.layer.redis, t.key, "f1")) + t.assertTrue(t.table.exists(t.layer.redis, t.key, "f2")) + t.assertTupleEqual(expected_scan, scan) + t.assertDictEqual(expected_get, get) + t.assertEqual(mapping["f1"], f1) + + def test_add_more_data(t): + mapping = {"f1": "cookie", "f2": 2343.2} + t.table.set(t.layer.redis, t.key, mapping) + + updated = {"f2": 1234.87, "f3": "baz"} + t.table.set(t.layer.redis, t.key, updated) + + expected_get = {"f1": "cookie", "f2": "1234.87", "f3": "baz"} + + get = t.table.get(t.layer.redis, t.key) + + t.assertDictEqual(expected_get, get) + + def test_getfield(t): + mapping = {"f1": "cookie", "f2": 2343.2} + t.table.set(t.layer.redis, t.key, mapping) + + f1 = t.table.getfield(t.layer.redis, t.key, "f1") + f2 = t.table.getfield(t.layer.redis, t.key, "f2") + + t.assertEqual(mapping["f1"], f1) + t.assertEqual(str(mapping["f2"]), f2) + + def test_delete_data(t): + mapping = {"f1": "cookie", "f2": 2343.2} + t.table.set(t.layer.redis, t.key, mapping) + + t.table.delete(t.layer.redis, t.key) + + t.assertFalse(t.table.exists(t.layer.redis, t.key)) + t.assertFalse(t.table.exists(t.layer.redis, t.key, "f1")) + t.assertFalse(t.table.exists(t.layer.redis, t.key, "f2")) + t.assertEqual(0, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertIsNone(t.table.get(t.layer.redis, t.key)) + t.assertIsNone(t.table.getfield(t.layer.redis, t.key, "f1")) + t.assertIsNone(t.table.getfield(t.layer.redis, t.key, "f2")) diff --git a/Products/ZenCollector/configcache/cache/table/tests/test_sortedset.py b/Products/ZenCollector/configcache/cache/table/tests/test_sortedset.py new file mode 100644 index 0000000000..338e8c88e8 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/tests/test_sortedset.py @@ -0,0 +1,192 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
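# Editor's note: as the test above shows (expected_get maps "f2" to "2343.2"),
# Redis stores hash values as strings, so numeric fields round-trip as text.
# A hypothetical helper, not part of the patch, for callers that need floats:
def _coerce_floats(mapping, float_fields=("updated", "effective")):
    # Convert selected hash fields back to float after a Hash.get(), since
    # Redis returns every value as a string.
    return {
        field: float(value) if field in float_fields else value
        for field, value in mapping.items()
    }

# _coerce_floats({"status": "Current", "updated": "1700000000.0"})
# -> {"status": "Current", "updated": 1700000000.0}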
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from unittest import TestCase + +from Products.Jobber.tests.utils import RedisLayer # , subTest + +from ..sortedset import SortedSet + + +class TestSortedSetTable(TestCase): + """Test the SortedSet table class.""" + + layer = RedisLayer + + def setUp(t): + t.key = "app:cat1:foo:bar" + t.table = SortedSet() + + def tearDown(t): + del t.table + + def test_no_data(t): + value = "c" + t.assertFalse(t.table.exists(t.layer.redis, t.key, value)) + t.assertEqual(0, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertEqual(0, len(tuple(t.table.range(t.layer.redis, t.key)))) + t.assertIsNone(t.table.score(t.layer.redis, t.key, value)) + t.assertIsNone(t.table.delete(t.layer.redis, t.key, value)) + + def test_add_data(t): + value = "baz" + score = 10.5 + + t.table.add(t.layer.redis, t.key, value, score) + + scan = tuple(t.table.scan(t.layer.redis, t.key)) + rng = tuple(t.table.range(t.layer.redis, t.key)) + expected = ((t.key, value, score),) + + t.assertTrue(t.table.exists(t.layer.redis, t.key, value)) + t.assertTupleEqual(expected, scan) + t.assertTupleEqual(expected, rng) + t.assertEqual(t.table.score(t.layer.redis, t.key, value), score) + + def test_add_more_data(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + scan = tuple(t.table.scan(t.layer.redis, t.key)) + rng = tuple(t.table.range(t.layer.redis, t.key)) + expected = ( + (t.key, data[0][0], data[0][1]), + (t.key, data[1][0], data[1][1]), + ) + + t.assertTrue(t.table.exists(t.layer.redis, t.key, data[0][0])) + t.assertTrue(t.table.exists(t.layer.redis, t.key, data[1][0])) + t.assertTupleEqual(expected, scan) + t.assertTupleEqual(expected, rng) + t.assertEqual( + t.table.score(t.layer.redis, t.key, data[0][0]), data[0][1] + ) + t.assertEqual( + t.table.score(t.layer.redis, t.key, data[1][0]), data[1][1] + ) + + def test_add_different_data(t): + key2 = "app:cat1:foo:bar2" + pattern = "app:cat1:foo:*" + data1 = ("baz", 10.5) + data2 = ("fab", 12.23) + + t.table.add(t.layer.redis, t.key, data1[0], data1[1]) + t.table.add(t.layer.redis, key2, data2[0], data2[1]) + + scan = tuple( + sorted( + t.table.scan(t.layer.redis, pattern), key=lambda x: x[1] + ) + ) + rng = tuple( + sorted( + t.table.range(t.layer.redis, pattern), + key=lambda x: x[1], + ) + ) + expected = ( + (t.key, data1[0], data1[1]), + (key2, data2[0], data2[1]), + ) + + t.assertTrue(t.table.exists(t.layer.redis, t.key, data1[0])) + t.assertTrue(t.table.exists(t.layer.redis, key2, data2[0])) + t.assertTupleEqual(expected, scan) + t.assertTupleEqual(expected, rng) + t.assertEqual( + t.table.score(t.layer.redis, t.key, data1[0]), data1[1] + ) + t.assertEqual( + t.table.score(t.layer.redis, key2, data2[0]), data2[1] + ) + + def test_minscore(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + rng = tuple(t.table.range(t.layer.redis, t.key, minscore=11)) + expected = ((t.key, data[1][0], data[1][1]),) + + t.assertTupleEqual(expected, rng) + + def test_minscore_equality(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + rng = tuple(t.table.range(t.layer.redis, t.key, minscore=12.23)) + 
expected = ((t.key, data[1][0], data[1][1]),) + + t.assertTupleEqual(expected, rng) + + def test_maxscore(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + rng = tuple(t.table.range(t.layer.redis, t.key, maxscore=11)) + expected = ((t.key, data[0][0], data[0][1]),) + + t.assertTupleEqual(expected, rng) + + def test_maxscore_equality(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + rng = tuple(t.table.range(t.layer.redis, t.key, maxscore=10.5)) + expected = ((t.key, data[0][0], data[0][1]),) + + t.assertTupleEqual(expected, rng) + + def test_range_full_bounding(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + rng = tuple( + t.table.range(t.layer.redis, t.key, minscore=11, maxscore=13) + ) + expected = ((t.key, data[1][0], data[1][1]),) + + t.assertTupleEqual(expected, rng) + + def test_range_inverted_range(t): + data = [("baz", 10.5), ("fab", 12.23)] + + t.table.add(t.layer.redis, t.key, data[0][0], data[0][1]) + t.table.add(t.layer.redis, t.key, data[1][0], data[1][1]) + + rng = tuple( + t.table.range(t.layer.redis, t.key, maxscore=11, minscore=13) + ) + expected = () + t.assertTupleEqual(expected, rng) + + def test_delete_data(t): + value = "baz" + score = 10.5 + + t.table.add(t.layer.redis, t.key, value, score) + t.table.delete(t.layer.redis, t.key, value) + + t.assertFalse(t.table.exists(t.layer.redis, t.key, value)) + t.assertEqual(0, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertIsNone(t.table.score(t.layer.redis, t.key, value)) diff --git a/Products/ZenCollector/configcache/cache/table/tests/test_string.py b/Products/ZenCollector/configcache/cache/table/tests/test_string.py new file mode 100644 index 0000000000..444e9973f3 --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/tests/test_string.py @@ -0,0 +1,89 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
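# Editor's note: a usage sketch for the String table (defined earlier in this
# patch and exercised by the tests that follow). The keys and the tiny page
# size are illustrative; mget() pages the underlying MGET calls but yields a
# single flat stream of (key, value) pairs.
import redis

from Products.ZenCollector.configcache.cache.table import String

client = redis.StrictRedis()  # assumed local Redis
table = String(mget_page_size=2)  # small page size just to show the batching

keys = ("app:demo:a", "app:demo:b", "app:demo:c")  # hypothetical keys
for k in keys:
    table.set(client, k, "value-for-" + k)

print(list(table.mget(client, *keys)))
# -> [("app:demo:a", "value-for-app:demo:a"), ..., ("app:demo:c", "value-for-app:demo:c")]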
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from unittest import TestCase + +from Products.Jobber.tests.utils import RedisLayer # , subTest + +from ..string import String + + +class TestStringTable(TestCase): + """Test the String table class.""" + + layer = RedisLayer + + def setUp(t): + t.key = "app:cat1:foo:bar" + t.table = String() + + def tearDown(t): + del t.table + + def test_no_data(t): + t.assertFalse(t.table.exists(t.layer.redis, t.key)) + t.assertEqual(0, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertIsNone(t.table.get(t.layer.redis, t.key)) + t.assertIsNone(t.table.delete(t.layer.redis, t.key)) + + def test_add_data(t): + data = "This is some data" + + t.table.set(t.layer.redis, t.key, data) + + t.assertTrue(t.table.exists(t.layer.redis, t.key)) + t.assertEqual(1, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertEqual(t.table.get(t.layer.redis, t.key), data) + + def test_add_different_data(t): + key2 = "app:cat1:foo:bar2" + data = "This is some data" + pattern = "app:cat1:foo:*" + + t.table.set(t.layer.redis, t.key, data) + t.table.set(t.layer.redis, key2, data) + + scan = sorted(t.table.scan(t.layer.redis, pattern)) + expected = sorted((t.key, key2)) + + t.assertTrue(t.table.exists(t.layer.redis, t.key)) + t.assertTrue(t.table.exists(t.layer.redis, key2)) + t.assertListEqual(expected, scan) + t.assertEqual(t.table.get(t.layer.redis, t.key), data) + t.assertEqual(t.table.get(t.layer.redis, key2), data) + + def test_delete_data(t): + data = "This is some data" + + t.table.set(t.layer.redis, t.key, data) + t.table.delete(t.layer.redis, t.key) + + t.assertFalse(t.table.exists(t.layer.redis, t.key)) + t.assertEqual(0, len(tuple(t.table.scan(t.layer.redis, t.key)))) + t.assertIsNone(t.table.get(t.layer.redis, t.key)) + + def test_mget(t): + import string + + template = "app:cat1:foo:{}" + keys = tuple( + template.format(letter) for letter in string.ascii_lowercase + ) + datum = tuple( + "/the/letter/{}".format(letter) + for letter in string.ascii_lowercase + ) + table = String(mget_page_size=5) + for key, value in zip(keys, datum): + table.set(t.layer.redis, key, value) + result = tuple(table.mget(t.layer.redis, *keys)) + t.assertEqual(len(string.ascii_lowercase), len(result)) + t.assertTupleEqual(tuple(zip(keys, datum)), result) diff --git a/Products/ZenCollector/configcache/cache/table/uid.py b/Products/ZenCollector/configcache/cache/table/uid.py new file mode 100644 index 0000000000..7942b60fcd --- /dev/null +++ b/Products/ZenCollector/configcache/cache/table/uid.py @@ -0,0 +1,71 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + + +class DeviceUIDTable(object): + """ + Manages mapping device names to their ZODB UID. 
+ """ + + def __init__(self, app, scan_page_size=1000, mget_page_size=10): + """Initialize a DeviceUIDTable instance.""" + self.__template = "{app}:device:uid:{{device}}".format(app=app) + self.__scan_count = scan_page_size + self.__mget_count = mget_page_size + + def make_key(self, device): + return self.__template.format(device=device) + + def exists(self, client, device): + """Return True if configuration data exists for the given ID. + + :param device: The ID of the device + :type device: str + :rtype: boolean + """ + return client.exists(self.make_key(device)) + + def scan(self, client, device="*"): + """ + Return an iterable of tuples of device names. + """ + pattern = self.make_key(device) + result = client.scan_iter(match=pattern, count=self.__scan_count) + return (key.rsplit(":", 1)[-1] for key in result) + + def get(self, client, device): + """Return the UID of the given device name. + + :type device: str + :rtype: str + """ + key = self.make_key(device) + return client.get(key) + + def set(self, client, device, uid): + """Insert or replace the UID for the given device. + + :param device: The ID of the configuration + :type device: str + :param uid: The ZODB UID of the device + :type uid: str + :raises: ValueError + """ + key = self.make_key(device) + client.set(key, uid) + + def delete(self, client, *devices): + """Delete one or more keys. + + This method does not fail if the key doesn't exist. + + :type uids: Sequence[str] + """ + keys = tuple(self.make_key(dvc) for dvc in devices) + client.delete(*keys) diff --git a/Products/ZenCollector/configcache/cache/utils.py b/Products/ZenCollector/configcache/cache/utils.py new file mode 100644 index 0000000000..f23d54f78c --- /dev/null +++ b/Products/ZenCollector/configcache/cache/utils.py @@ -0,0 +1,49 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from itertools import islice +from string import Formatter + + +def parse_atoms(template): + """ + Returns the named placeholders from a template string. + """ + return tuple(nm for _, nm, _, _ in Formatter().parse(template) if nm) + + +def extract_atoms(value, sep, count): + """ + Returns the last `count` values from the string `value`. + """ + return tuple(value.rsplit(sep, count)[1:]) + + +# Adapted from docs.python.org/3.11/library/itertools.html +def batched(iterable, n): + """ + Batch data into tuples of length `n`. The last batch may be shorter. + + >>> list(batched('ABCDEFG', 3)) + [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)] + """ + if n < 1: + raise ValueError("n must be greater than zero") + itr = iter(iterable) + while True: + batch = tuple(islice(itr, n)) + if not batch: + break + yield batch + # + # Note: In Python 3.7+, the above loop would be written as + # while (batch := tuple(islice(itr, n))): + # yield batch diff --git a/Products/ZenCollector/configcache/cli/__init__.py b/Products/ZenCollector/configcache/cli/__init__.py new file mode 100644 index 0000000000..66cd5296d0 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/__init__.py @@ -0,0 +1,16 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .device import Device +from .oidmap import OidMap + + +__all__ = ("Device", "OidMap") diff --git a/Products/ZenCollector/configcache/cli/_groups.py b/Products/ZenCollector/configcache/cli/_groups.py new file mode 100644 index 0000000000..db5c98e53d --- /dev/null +++ b/Products/ZenCollector/configcache/cli/_groups.py @@ -0,0 +1,231 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function, absolute_import, division + +from collections import defaultdict +from itertools import chain + +import attr + +from ._stats import UniqueCountStat + + +class DeviceGroup(object): + + name = "devices" + order = 1 + + def __init__(self, stats): + # Only one row, so use summary + self._summary = tuple(s() for s in stats) + try: + # DeviceGroup doesn't want CountStat + posn = stats.index(UniqueCountStat) + except ValueError: + # Not found, so don't worry about it + self._counter = None + self._otherstats = self._summary + else: + # Found, replace it with UniqueCountStat + self._counter = self._summary[posn] + self._otherstats = self._summary[0:posn] + self._summary[posn+1:] + self._stats = stats + self._samples = 0 + + def handle_key(self, key): + if self._counter is None: + return + self._counter.mark(key.device) + self._samples += 1 + + def handle_timestamp(self, key, ts): + for stat in self._otherstats: + stat.mark(ts) + self._samples += 1 + + def handle_status(self, status): + pass + + def headings(self): + return [s.name for s in self._stats] + + def hints(self): + return [s.type_ for s in self._stats] + + def rows(self): + return [] + + def summary(self): + if self._samples == 0: + return [] + return [s.value() for s in self._summary] + + +class ServiceGroup(object): + + name = "services" + order = 2 + + def __init__(self, stats): + self._stats = stats + self._byrow = defaultdict(self._makerowvalue) + self._summary = tuple(s() for s in stats) + self._samples = 0 + + def _makerowvalue(self): + return tuple(stat() for stat in self._stats) + + def handle_key(self, key): + pass + + def handle_timestamp(self, key, ts): + for stat in self._byrow[key.service]: + stat.mark(ts) + for stat in self._summary: + stat.mark(ts) + self._samples += 1 + + def handle_status(self, status): + pass + + def headings(self): + headings = ["configuration service class"] + headings.extend(s.name for s in self._stats) + return headings + + def hints(self): + hints = ["str"] + hints.extend(s.type_ for s in self._stats) + return hints + + def rows(self): + if self._samples == 0: + return [] + return ( + self._makerow(svcname, stats) + for svcname, stats in self._byrow.iteritems() + ) + + def _makerow(self, svcname, stats): + return tuple(chain((svcname,), (s.value() for s in stats))) + + def summary(self): + if self._samples == 0: + return [] + return [s.value() for s in self._summary] + + +class MonitorGroup(object): + + name = "monitors" + order = 3 + + def __init__(self, stats): + self._stats = stats + 
self._byrow = defaultdict(self._makerowvalue) + self._summary = tuple(s() for s in stats) + self._samples = 0 + + def _makerowvalue(self): + return tuple(stat() for stat in self._stats) + + def handle_key(self, key): + pass + + def handle_timestamp(self, key, ts): + for stat in self._byrow[key.monitor]: + stat.mark(ts) + for stat in self._summary: + stat.mark(ts) + self._samples += 1 + + def handle_status(self, status): + pass + + def headings(self): + headings = ["collector"] + headings.extend(s.name for s in self._stats) + return headings + + def hints(self): + hints = ["str"] + hints.extend(s.type_ for s in self._stats) + return hints + + def rows(self): + if self._samples == 0: + return [] + return ( + self._makerow(name, stats) + for name, stats in self._byrow.iteritems() + ) + + def _makerow(self, name, stats): + return tuple(chain((name,), (s.value() for s in stats))) + + def summary(self): + if self._samples == 0: + return [] + return [s.value() for s in self._summary] + + +class StatusGroup(object): + + name = "statuses" + order = 4 + + def __init__(self, stats): + self._stats = stats + self._byrow = defaultdict(self._makerowvalue) + self._summary = tuple(s() for s in stats) + self._samples = 0 + + def _makerowvalue(self): + return tuple(stat() for stat in self._stats) + + def handle_key(self, key): + pass + + def handle_timestamp(self, key, ts): + pass + + def handle_status(self, status): + data = attr.astuple(status) + for stat in self._byrow[type(status).__name__]: + stat.mark(data[-1]) + for stat in self._summary: + stat.mark(data[-1]) + self._samples += 1 + + def headings(self): + headings = ["status"] + headings.extend(s.name for s in self._stats) + return headings + + def hints(self): + hints = ["str"] + hints.extend(s.type_ for s in self._stats) + return hints + + def rows(self): + if self._samples == 0: + return [] + return ( + self._makerow(name, stats) + for name, stats in self._byrow.iteritems() + ) + + def _makerow(self, name, stats): + return tuple(chain((name,), (s.value() for s in stats))) + + def summary(self): + if self._samples == 0: + return [] + return [s.value() for s in self._summary] diff --git a/Products/ZenCollector/configcache/cli/_json.py b/Products/ZenCollector/configcache/cli/_json.py new file mode 100644 index 0000000000..da7fc53d9b --- /dev/null +++ b/Products/ZenCollector/configcache/cli/_json.py @@ -0,0 +1,78 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function, absolute_import, division + +import json + + +class JSONOutput(object): + """ + { + "devices": [ + "summary" : { + "number_of_devices": 4, + ... + } + ], + "services": { + "data": [ + {: , ... }, ... + ], + "summary": { + : , # except first column + ... + } + }, + "monitors": { + "data": [ + {: , ... }, ... + ], + "summary": { + : , # except first column + ... + } + }, + "statuses": { + "data": [ + {: , ... }, ... + ], + "summary": { + : , # except first column + ... 
+ } + } + } + """ + + def write(self, *groups): + result = {} + for group in groups: + rows = list(group.rows()) + summary = group.summary() + headings = [ + hdr.replace(" ", "_").lower() for hdr in group.headings() + ] + + if len(rows) == 0 and len(summary) == 0: + continue + + if len(headings) == 1 and len(rows) == 1: + result[group.name] = [ + {headings[0].replace(" ", "_").lower(): rows[0][0]} + ] + continue + + rows = [dict(zip(headings, row)) for row in rows] + if len(rows) == 0: + summary = dict(zip(headings, summary)) + else: + summary = dict(zip(headings[1:], summary)) + result[group.name] = {"data": rows, "summary": summary} + print(json.dumps(result)) diff --git a/Products/ZenCollector/configcache/cli/_selection.py b/Products/ZenCollector/configcache/cli/_selection.py new file mode 100644 index 0000000000..3090944dbe --- /dev/null +++ b/Products/ZenCollector/configcache/cli/_selection.py @@ -0,0 +1,86 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import enum +import six + +__all__ = ("get_message", "confirm") + + +def get_message(action, monitor, service): + mon_selection = _Selection.select(monitor) + svc_selection = _Selection.select(service) + mesg = _messages.get((mon_selection, svc_selection), _default_message) + return mesg.format(act=action, mon=monitor, svc=service) + + +def confirm(mesg): + response = None + prompt = "{}. Are you sure (y/N)? ".format(mesg) + while response not in ["y", "n", ""]: + response = six.moves.input(prompt).lower() + return response == "y" + + +class _Selection(enum.Enum): + All = "All" + Some = "Some" + One = "One" + + @classmethod + def select(cls, arg): + return cls.All if arg == "*" else cls.Some if "*" in arg else cls.One + + +def _build_message_lookup(): + AllMon = AllSvc = _Selection.All + SomeMon = SomeSvc = _Selection.Some + OneMon = OneSvc = _Selection.One + return { + (AllMon, AllSvc): ("{act} all device configurations"), + (AllMon, SomeSvc): ( + "{act} all device configurations created by all " + "services matching '{svc}'" + ), + (AllMon, OneSvc): ( + "{act} all device configurations created by the '{svc}' service" + ), + (SomeMon, AllSvc): ( + "{act} all configurations for devices monitored by all " + "collectors matching '{mon}'" + ), + (SomeMon, SomeSvc): ( + "{act} all configurations device monitored by all " + "collectors matching '{mon}' and created by all services " + "matching '{svc}'" + ), + (SomeMon, OneSvc): ( + "{act} all configurations created by the '{svc}' " + "service for devices monitored by all collectors " + "matching '{mon}'" + ), + (OneMon, AllSvc): ( + "{act} all configurations for devices monitored by the " + "'{mon}' collector" + ), + (OneMon, SomeSvc): ( + "{act} all configurations for devices monitored by the " + "'{mon}' collector and created by all services matching '{svc}'" + ), + (OneMon, OneSvc): ( + "{act} all configurations for devices monitored by the " + "'{mon}' collector and created by the '{svc}' service" + ), + } + + +_messages = _build_message_lookup() +_default_message = "collector '%s' service '%s'" diff --git a/Products/ZenCollector/configcache/cli/_stats.py b/Products/ZenCollector/configcache/cli/_stats.py 
new file mode 100644 index 0000000000..e2f0f2aa52 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/_stats.py @@ -0,0 +1,181 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function, absolute_import, division + +import sys +import time + +_current_time = None + + +def _current_time_unset(): + global _current_time + _current_time = time.time() + try: + return _current_time + finally: + global _get_current_time + _get_current_time = _current_time_set + + +def _current_time_set(): + global _current_time + return _current_time + + +_get_current_time = _current_time_unset + + +class CountStat(object): + + name = "count" + type_ = "int" + + def __init__(self): + self._count = 0 + + def mark(self, *args): + self._count += 1 + + def value(self): + return self._count + + +class UniqueCountStat(CountStat): + + name = "count of devices" + + def __init__(self): + self._values = set() + + def mark(self, value): + self._values.add(value) + + def value(self): + return len(self._values) + + +class AverageStat(object): + + name = "average" + type_ = "timedelta" + + def __init__(self): + self._total = 0 + self._count = 0 + + def mark(self, value): + self._count += 1 + self._total += value + + def value(self): + if self._count == 0: + return 0 + return self._total / self._count + + +class AverageAgeStat(AverageStat): + + name = "average age" + + def value(self): + avg = super(AverageAgeStat, self).value() + if avg == 0: + return 0 + return _get_current_time() - avg + + +class MedianStat(object): + + name = "median" + type_ = "timedelta" + + def __init__(self): + self._min = sys.maxsize + self._max = 0 + + def mark(self, value): + value = int(value) + self._min = min(self._min, value) + self._max = max(self._max, value) + + def value(self): + if self._min == sys.maxsize: + return 0 + return (self._min + self._max) / 2 + + +class MedianAgeStat(MedianStat): + + name = "median age" + + def value(self): + median = super(MedianAgeStat, self).value() + if median == 0: + return 0 + return _get_current_time() - median + + +class MinStat(object): + + name = "min" + type_ = "float" + + def __init__(self): + self._min = sys.maxsize + + def mark(self, value): + self._min = min(self._min, int(value)) + + def value(self): + if self._min == sys.maxsize: + return 0 + return self._min + + +class MaxAgeStat(MinStat): + + name = "max age" + type_ = "timedelta" + + def value(self): + maxv = super(MaxAgeStat, self).value() + if maxv == 0: + return 0 + return _get_current_time() - maxv + + +class MaxStat(object): + + name = "max" + type_ = "float" + + def __init__(self): + self._max = 0 + + def mark(self, value): + self._max = max(self._max, int(value)) + + def value(self): + if self._max == 0: + return 0 + return self._max + + +class MinAgeStat(MaxStat): + + name = "min age" + type_ = "timedelta" + + def value(self): + minv = super(MinAgeStat, self).value() + if minv == 0: + return 0 + return _get_current_time() - minv diff --git a/Products/ZenCollector/configcache/cli/_tables.py b/Products/ZenCollector/configcache/cli/_tables.py new file mode 100644 index 0000000000..83675fd7a5 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/_tables.py @@ -0,0 +1,124 @@ 
+############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function, absolute_import, division + +import datetime + +from itertools import chain + + +class TablesOutput(object): + + def write(self, *groups): + for group in groups: + self._display( + list(group.rows()), + group.summary(), + group.headings(), + group.hints(), + ) + + def _display(self, rows, summary, headings, hints): + if not rows and not summary: + return + + # Transform row values for presentation + if rows: + rows = [ + tuple(_xform(value, hint) for value, hint in zip(row, hints)) + for row in sorted(rows, key=lambda x: x[0]) + ] + + # Transform total values for presentation + if summary: + if rows: + summary = tuple( + _xform(v, h) for v, h in zip([""] + summary, hints) + ) + else: + summary = tuple( + _xform(v, h) for v, h in zip(summary, hints) + ) + + # Transform column headers for presentation + if summary and not rows: + headings = [hdr.capitalize() for hdr in headings] + else: + headings = [hdr.upper() for hdr in headings] + + # Initialize maxwidth values for each column + maxwidths = [0 for _ in headings] + + if summary and not rows: + hdrmaxw = max(len(hdr) for hdr in headings) + maxwidths = [hdrmaxw] * len(headings) + else: + for row in rows: + for idx, (mw, col) in enumerate(zip(maxwidths, row)): + maxwidths[idx] = max(mw, len(str(col))) + for idx, (mw, hd) in enumerate(zip(maxwidths, headings)): + maxwidths[idx] = max(mw, len(hd)) + for idx, (mw, tv) in enumerate(zip(maxwidths[1:], summary)): + maxwidths[idx + 1] = max(mw, len(str(tv))) + + offset = len(maxwidths) + tmplt = " ".join( + "{{{0}:{{{1}}}}}".format(idx, idx + offset) + for idx in range(0, offset) + ) + fmtspecs = [ + _get_fmt_spec(mw, hint) for mw, hint in zip(maxwidths, hints) + ] + print() + if summary and not rows: + for hdr, value in zip(headings, summary): + print("{0:{2}}: {1}".format(hdr, value, maxwidths[0])) + else: + if headings: + print(tmplt.format(*chain(headings, fmtspecs))) + sep = ["-" * c for c in maxwidths] + print(tmplt.format(*chain(sep, maxwidths))) + + for row in rows: + print(tmplt.format(*chain(row, fmtspecs))) + + if summary: + print(tmplt.format(*chain(sep, maxwidths))) + print(tmplt.format(*chain(summary, fmtspecs))) + + +def _xform(value, hint): + if hint == "timedelta": + td = datetime.timedelta(seconds=value) + hours = td.seconds // 3600 + minutes = (td.seconds - (hours * 3600)) // 60 + seconds = td.seconds - (hours * 3600) - (minutes * 60) + return "{0} {1:02}:{2:02}:{3:02}".format( + ( + "" + if td.days == 0 + else "{} day{}".format(td.days, "" if td.days == 1 else "s") + ), + hours, + minutes, + seconds, + ).strip() + else: + return value + + +def _get_fmt_spec(mw, hint): + if hint == "int": + return ">{}".format(mw) + elif hint == "timedelta": + return ">{}".format(mw) + elif hint == "float": + return ">{}.2f".format(mw) + return mw diff --git a/Products/ZenCollector/configcache/cli/args.py b/Products/ZenCollector/configcache/cli/args.py new file mode 100644 index 0000000000..7a56e2837c --- /dev/null +++ b/Products/ZenCollector/configcache/cli/args.py @@ -0,0 +1,88 @@ +############################################################################## +# +# Copyright (C) Zenoss, 
Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import argparse + +import six + + +class MultiChoice(argparse.Action): + """Allow multiple values for a choice option.""" + + def __init__(self, option_strings, dest, **kwargs): + kwargs["type"] = self._split_listed_choices + super(MultiChoice, self).__init__(option_strings, dest, **kwargs) + + @property + def choices(self): + return self._choices_checker + + @choices.setter + def choices(self, values): + self._choices_checker = _ChoicesChecker(values) + + def _split_listed_choices(self, value): + if "," in value: + return tuple(value.split(",")) + return value + + def __call__(self, parser, namespace, values=None, option_string=None): + if isinstance(values, six.string_types): + values = (values,) + setattr(namespace, self.dest, values) + + +class _ChoicesChecker(object): + def __init__(self, values): + self._choices = values + + def __contains__(self, value): + if isinstance(value, (list, tuple)): + return all(v in self._choices for v in value) + else: + return value in self._choices + + def __iter__(self): + return iter(self._choices) + + +_devargs_parser = None + + +def get_devargs_parser(): + global _devargs_parser + if _devargs_parser is None: + _devargs_parser = argparse.ArgumentParser(add_help=False) + _devargs_parser.add_argument( + "-m", + "--collector", + type=str, + default="*", + help="Name of the performance collector. Supports simple '*' " + "wildcard comparisons. A lone '*' selects all collectors.", + ) + _devargs_parser.add_argument( + "-s", + "--service", + type=str, + default="*", + help="Name of the configuration service. Supports simple '*' " + "wildcard comparisons. A lone '*' selects all services.", + ) + _devargs_parser.add_argument( + "device", + nargs="*", + default=argparse.SUPPRESS, + help="Name of the device. Multiple devices may be specified. " + "Supports simple '*' wildcard comparisons. Not specifying a " + "device will select all devices.", + ) + return _devargs_parser diff --git a/Products/ZenCollector/configcache/cli/device.py b/Products/ZenCollector/configcache/cli/device.py new file mode 100644 index 0000000000..99a142bc31 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/device.py @@ -0,0 +1,36 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
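# Editor's note: a sketch of the MultiChoice action defined in args.py above,
# shown against a standalone argparse parser (the parser wiring here is only
# for illustration). The comma-separated value is split by the custom type
# before the choices check, so grouped states parse while unknown states are
# still rejected by argparse.
import argparse

from Products.ZenCollector.configcache.cli.args import MultiChoice

parser = argparse.ArgumentParser()
parser.add_argument(
    "-f",
    dest="states",
    action=MultiChoice,
    choices=("current", "retired", "expired", "pending", "building"),
)

print(parser.parse_args(["-f", "current,expired"]).states)  # -> ('current', 'expired')
print(parser.parse_args(["-f", "pending"]).states)          # -> ('pending',)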
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from ..app.args import get_subparser + +from .expire import ExpireDevice +from .list import ListDevice +from .remove import RemoveDevice +from .show import ShowDevice +from .stats import StatsDevice + + +class Device(object): + description = "Manage the device configuration cache" + + @staticmethod + def add_arguments(parser, subparsers): + devicep = get_subparser( + subparsers, + "device", + description=Device.description, + ) + device_subparsers = devicep.add_subparsers(title="Device Subcommands") + ExpireDevice.add_arguments(devicep, device_subparsers) + ListDevice.add_arguments(devicep, device_subparsers) + RemoveDevice.add_arguments(devicep, device_subparsers) + ShowDevice.add_arguments(devicep, device_subparsers) + StatsDevice.add_arguments(devicep, device_subparsers) diff --git a/Products/ZenCollector/configcache/cli/expire.py b/Products/ZenCollector/configcache/cli/expire.py new file mode 100644 index 0000000000..5518eea346 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/expire.py @@ -0,0 +1,128 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import sys +import time + +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from ..app import initialize_environment +from ..app.args import get_subparser +from ..cache import ConfigStatus, DeviceQuery + +from .args import get_devargs_parser +from ._selection import get_message, confirm + + +class ExpireOidMap(object): + description = "Mark OID Map as expired" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "expire", + description=ExpireOidMap.description, + ) + subp.set_defaults(factory=ExpireOidMap) + + def __init__(self, args): + pass + + def run(self): + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("oidmapcache-store", client) + status = store.get_status() + if not isinstance(status, ConfigStatus.Expired): + store.set_expired(time.time()) + print("Expired oidmap configuration") + else: + print("Oidmap configuration already expired") + + +class ExpireDevice(object): + description = "Mark device configurations as expired" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "expire", + description=ExpireDevice.description, + parent=get_devargs_parser(), + ) + subp.set_defaults(factory=ExpireDevice) + + def __init__(self, args): + self._monitor = args.collector + self._service = args.service + self._devices = getattr(args, "device", []) + + def run(self): + haswildcard = any("*" in d for d in self._devices) + if haswildcard: + if len(self._devices) > 1: + print( + "Only one DEVICE argument supported when a " + "wildcard is used.", + file=sys.stderr, + ) + return + if not self._confirm_inputs(): + print("exit") + return + initialize_environment(configs=self.configs, useZope=False) + client = 
getRedisClient(url=getRedisUrl()) + store = createObject("deviceconfigcache-store", client) + self._expire(store, self._get(store, haswildcard)) + + def _get(self, store, haswildcard): + query = self._make_query(haswildcard) + results = store.query_statuses(query) + return tuple(self._get_keys_from_results(results, haswildcard)) + + def _expire(self, store, keys): + now = time.time() + store.set_expired(*((key, now) for key in keys)) + count = len(keys) + print( + "expired %d device configuration%s" + % (count, "" if count == 1 else "s") + ) + + def _make_query(self, haswildcard): + if haswildcard: + return DeviceQuery( + service=self._service, + monitor=self._monitor, + device=self._devices[0], + ) + return DeviceQuery(service=self._service, monitor=self._monitor) + + def _get_keys_from_results(self, results, haswildcard): + if not self._devices or haswildcard: + return (status.key for status in results) + return ( + status.key + for status in results + if status.key.device in self._devices + ) + + def _confirm_inputs(self): + if self._devices: + return True + mesg = get_message("Recreate", self._monitor, self._service) + return confirm(mesg) diff --git a/Products/ZenCollector/configcache/cli/list.py b/Products/ZenCollector/configcache/cli/list.py new file mode 100644 index 0000000000..32dea7fb4b --- /dev/null +++ b/Products/ZenCollector/configcache/cli/list.py @@ -0,0 +1,197 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import argparse +import sys +import time + +from datetime import datetime, timedelta +from itertools import chain + +import attr + +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from ..app import initialize_environment +from ..app.args import get_subparser +from ..cache import ConfigStatus, DeviceQuery + +from .args import get_devargs_parser, MultiChoice + + +class ListDevice(object): + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + listp = get_subparser( + subparsers, + "list", + description="List device configurations", + parent=get_devargs_parser(), + ) + listp.add_argument( + "-u", + dest="show_uid", + default=False, + action="store_true", + help="Display ZODB path for device", + ) + listp.add_argument( + "-f", + dest="states", + action=MultiChoice, + choices=("current", "retired", "expired", "pending", "building"), + default=argparse.SUPPRESS, + help="Only list configurations having these states. 
One or " + "more states may be specified, separated by commas.", + ) + listp.set_defaults(factory=ListDevice) + + def __init__(self, args): + self._monitor = args.collector + self._service = args.service + self._showuid = args.show_uid + self._devices = getattr(args, "device", []) + state_names = getattr(args, "states", ()) + if state_names: + states = set() + for name in state_names: + states.add(_name_state_lookup[name]) + self._states = tuple(states) + else: + self._states = () + + def run(self): + haswildcard = any("*" in d for d in self._devices) + if haswildcard and len(self._devices) > 1: + print( + "Only one DEVICE argument supported when a wildcard is used.", + file=sys.stderr, + ) + return + initialize_environment(configs=self.configs, useZope=False) + self._display(*self._collate(*self._get(haswildcard))) + + def _get(self, haswildcard): + client = getRedisClient(url=getRedisUrl()) + store = createObject("deviceconfigcache-store", client) + query = self._make_query(haswildcard) + statuses = tuple(self._filter(store.query_statuses(query))) + uid_map = self._get_uidmap(store, statuses) + return (statuses, uid_map) + + def _collate(self, statuses, uid_map): + rows = [] + maxd, maxs, maxt, maxa, maxm = 1, 1, 1, 1, 1 + now = time.time() + for status in sorted( + statuses, key=lambda x: (x.key.device, x.key.service) + ): + devid = ( + status.key.device + if (status.key.device not in uid_map) + else uid_map[status.key.device] + ) + status_text = _format_status(status) + ts = attr.astuple(status)[-1] + ts_text = _format_date(ts) + age_text = _format_timedelta(now - ts) + maxd = max(maxd, len(devid)) + maxs = max(maxs, len(status_text)) + maxt = max(maxt, len(ts_text)) + maxa = max(maxa, len(age_text)) + maxm = max(maxm, len(status.key.monitor)) + rows.append( + ( + devid, + status_text, + ts_text, + age_text, + status.key.monitor, + status.key.service, + ) + ) + return rows, (maxd, maxs, maxt, maxa, maxm) + + def _display(self, rows, widths): + if rows: + print(_header_template.format(*chain(_headings, widths))) + for row in rows: + print(_row_template.format(*chain(row, widths))) + + def _make_query(self, haswildcard): + if haswildcard or len(self._devices) == 1: + return DeviceQuery( + service=self._service, + monitor=self._monitor, + device=self._devices[0], + ) + return DeviceQuery(service=self._service, monitor=self._monitor) + + def _filter(self, data): + if self._states: + data = ( + status for status in data if isinstance(status, self._states) + ) + if len(self._devices) > 1: + data = ( + status for status in data if status.key.device in self._devices + ) + return data + + def _get_uidmap(self, store, data): + if self._showuid: + deviceids = tuple(status.key.device for status in data) + uids = store.get_uids(*deviceids) + return dict(uids) + return {} + + +_header_template = "{0:{6}} {1:{7}} {2:^{8}} {3:^{9}} {4:{10}} {5}" +_row_template = "{0:{6}} {1:{7}} {2:{8}} {3:>{9}} {4:{10}} {5}" +_headings = ("DEVICE", "STATUS", "LAST CHANGE", "AGE", "COLLECTOR", "SERVICE") + +_name_state_lookup = { + "current": ConfigStatus.Current, + "retired": ConfigStatus.Retired, + "expired": ConfigStatus.Expired, + "pending": ConfigStatus.Pending, + "building": ConfigStatus.Building, +} + + +def _format_timedelta(value): + td = timedelta(seconds=value) + hours = td.seconds // 3600 + minutes = (td.seconds - (hours * 3600)) // 60 + seconds = td.seconds - (hours * 3600) - (minutes * 60) + return "{0} {1:02}:{2:02}:{3:02}".format( + ( + "" + if td.days == 0 + else "{} day{}".format(td.days, "" if 
td.days == 1 else "s") + ), + hours, + minutes, + seconds, + ).strip() + + +def _format_status(status): + return type(status).__name__.lower() + + +def _format_date(ts): + when = datetime.fromtimestamp(ts) + return when.strftime("%Y-%m-%d %H:%M:%S") diff --git a/Products/ZenCollector/configcache/cli/oidmap.py b/Products/ZenCollector/configcache/cli/oidmap.py new file mode 100644 index 0000000000..b178134837 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/oidmap.py @@ -0,0 +1,32 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +from ..app.args import get_subparser + +from .expire import ExpireOidMap +from .show import ShowOidMap +from .stats import StatsOidMap + + +class OidMap(object): + description = "Manage the OID Map cache" + + @staticmethod + def add_arguments(parser, subparsers): + oidmapp = get_subparser( + subparsers, + "oidmap", + description=OidMap.description, + ) + oidmap_subparsers = oidmapp.add_subparsers(title="OidMap Subcommands") + ExpireOidMap.add_arguments(oidmapp, oidmap_subparsers) + ShowOidMap.add_arguments(oidmapp, oidmap_subparsers) + StatsOidMap.add_arguments(oidmapp, oidmap_subparsers) diff --git a/Products/ZenCollector/configcache/cli/remove.py b/Products/ZenCollector/configcache/cli/remove.py new file mode 100644 index 0000000000..f3c46ce28e --- /dev/null +++ b/Products/ZenCollector/configcache/cli/remove.py @@ -0,0 +1,126 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
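# Editor's note: a worked example of the _format_timedelta helper above, so
# the "N day(s) HH:MM:SS" output format is concrete. Importing the private
# helper is for illustration only.
from Products.ZenCollector.configcache.cli.list import _format_timedelta

# 1 day, 2 hours, 3 minutes, 4 seconds:
print(_format_timedelta(93784))   # -> "1 day 02:03:04"
# Under a day, the day prefix is dropped:
print(_format_timedelta(3661))    # -> "01:01:01"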
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import sys + +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from ..app import initialize_environment +from ..app.args import get_subparser +from ..cache import DeviceQuery + +from .args import get_devargs_parser +from ._selection import get_message, confirm + + +class RemoveOidMap(object): + description = "Remove oidmap configuration from the cache" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "oidmap", + description=RemoveOidMap.description, + ) + subp.set_defaults(factory=RemoveOidMap) + + def __init__(self, args): + pass + + def run(self): + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("oidmapcache-store", client) + status = store.get_status() + if status is None: + print("No oidmap configuration found in the cache") + else: + store.remove() + print("Oidmap configuration removed from the cache") + + +class RemoveDevice(object): + description = "Delete device configurations from the cache" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "remove", + description=RemoveDevice.description, + parent=get_devargs_parser(), + ) + subp.set_defaults(factory=RemoveDevice) + + def __init__(self, args): + self._monitor = args.collector + self._service = args.service + self._devices = getattr(args, "device", []) + + def run(self): + haswildcard = any("*" in d for d in self._devices) + if haswildcard: + if len(self._devices) > 1: + print( + "Only one DEVICE argument supported when a " + "wildcard is used.", + file=sys.stderr, + ) + return + if not self._confirm_inputs(): + print("exit") + return + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("deviceconfigcache-store", client) + self._remove(store, self._get(store, haswildcard)) + + def _get(self, store, haswildcard): + query = self._make_query(haswildcard) + results = store.query_statuses(query) + return tuple(self._get_keys_from_results(results, haswildcard)) + + def _remove(self, store, keys): + store.remove(*keys) + count = len(keys) + print( + "deleted %d device configuration%s" + % (count, "" if count == 1 else "s") + ) + + def _make_query(self, haswildcard): + if haswildcard: + return DeviceQuery( + service=self._service, + monitor=self._monitor, + device=self._devices[0], + ) + return DeviceQuery(service=self._service, monitor=self._monitor) + + def _get_keys_from_results(self, results, haswildcard): + if not self._devices or haswildcard: + return (status.key for status in results) + return ( + status.key + for status in results + if status.key.device in self._devices + ) + + def _confirm_inputs(self): + if self._devices: + return True + mesg = get_message("Delete", self._monitor, self._service) + return confirm(mesg) diff --git a/Products/ZenCollector/configcache/cli/show.py b/Products/ZenCollector/configcache/cli/show.py new file mode 100644 index 0000000000..acf05eb968 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/show.py @@ -0,0 +1,194 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
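# RemoveDevice above follows the same selection rule seen in ListDevice
# earlier: a single wildcard pattern is handed to DeviceQuery so the store
# does the matching, while a list of exact names is filtered client-side
# against the returned statuses. A rough sketch of that split, using
# stand-in namedtuples instead of the real query/status types.
from __future__ import print_function
from collections import namedtuple

Key = namedtuple("Key", "service monitor device")
Status = namedtuple("Status", "key")

def select_keys(statuses, devices, haswildcard):
    if not devices or haswildcard:
        # the wildcard (if any) was already part of the store query,
        # so every returned status is a match
        return [s.key for s in statuses]
    # exact names: keep only the listed devices
    return [s.key for s in statuses if s.key.device in devices]

statuses = [
    Status(Key("svc1", "mon1", d)) for d in ("abc-01", "abc-02", "efg-01")
]
print(select_keys(statuses, ["abc-01"], haswildcard=False))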
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import os +import sys +import types + +from IPython.lib import pretty +from twisted.spread.jelly import unjellyableRegistry +from twisted.spread import pb +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl +from Products.ZenUtils.terminal_size import get_terminal_size + +from ..app import initialize_environment +from ..app.args import get_subparser +from ..cache import DeviceQuery + + +class ShowOidMap(object): + description = "Show the oidmap configuration" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "show", + description=ShowOidMap.description, + ) + termsize = get_terminal_size() + subp.add_argument( + "--width", + type=int, + default=termsize.columns, + help="Maxiumum number of columns to use in the output. " + "By default, this is the width of the terminal", + ) + subp.set_defaults(factory=ShowOidMap) + + def __init__(self, args): + if _is_output_redirected(): + # when stdout is redirected, default to 79 columns unless + # the --width option has a non-default value. + termsize = get_terminal_size() + if args.width != termsize.columns: + self._columns = args.width + else: + self._columns = 79 + else: + self._columns = args.width + + def run(self): + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("oidmapcache-store", client) + record = store.get() + if record is None: + print( + "No oidmap configuration found in the cache", file=sys.stderr + ) + else: + pretty.pprint( + record.oidmap, max_width=self._columns, max_seq_length=0 + ) + + +class ShowDevice(object): + description = "Show a device configuration" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, "show", description=ShowDevice.description + ) + termsize = get_terminal_size() + subp.add_argument( + "--width", + type=int, + default=termsize.columns, + help="Maxiumum number of columns to use in the output. " + "By default, this is the width of the terminal", + ) + subp.add_argument( + "service", nargs=1, help="name of the configuration service" + ) + subp.add_argument( + "collector", nargs=1, help="name of the performance collector" + ) + subp.add_argument("device", nargs=1, help="name of the device") + subp.set_defaults(factory=ShowDevice) + + def __init__(self, args): + self._monitor = args.collector[0] + self._service = args.service[0] + self._device = args.device[0] + if _is_output_redirected(): + # when stdout is redirected, default to 79 columns unless + # the --width option has a non-default value. 
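+            # (an explicit, non-default --width still wins; otherwise the
+            # output width falls back to 79 columns)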
+ termsize = get_terminal_size() + if args.width != termsize.columns: + self._columns = args.width + else: + self._columns = 79 + else: + self._columns = args.width + + def run(self): + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("deviceconfigcache-store", client) + results, err = _query_cache( + store, + service=self._service, + monitor=self._monitor, + device=self._device, + ) + if results: + for cls in set(unjellyableRegistry.values()): + if isinstance( + cls, (types.ClassType, types.TypeType) + ) and issubclass(cls, (pb.Copyable, pb.RemoteCopy)): + pretty.for_type(cls, _pp_hide_passwords) + else: + pretty.for_type(cls, _pp_default) + try: + pretty.pprint( + results.config, max_width=self._columns, max_seq_length=0 + ) + except IOError as ex: + if ex.errno != 32: # broken pipe + print(ex, file=sys.stderr) + except KeyboardInterrupt as ex: + print(ex, file=sys.stderr) + else: + print(err, file=sys.stderr) + + +def _query_cache(store, service, monitor, device): + query = DeviceQuery(service=service, monitor=monitor, device=device) + results = store.search(query) + first_key = next(results, None) + if first_key is None: + return (None, "device configuration not found") + second_key = next(results, None) + if second_key is not None: + return (None, "more than one device configuration matched arguments") + return (store.get(first_key), None) + + +def _pp_hide_passwords(obj, p, cycle): + _printer( + obj, + p, + cycle, + lambda k, v: v if "password" not in k.lower() else "******", + ) + + +def _pp_default(obj, p, cycle): + _printer(obj, p, cycle, lambda k, v: v) + + +def _printer(obj, p, cycle, vprint): + clsname = obj.__class__.__name__ + if cycle: + p.text("<{}: ...>".format(clsname)) + else: + with p.group(2, "<{}: ".format(clsname), ">"): + attrs = ( + (k, v) + for k, v in sorted(obj.__dict__.items(), key=lambda x: x[0]) + if v not in (None, "", {}, []) + ) + for idx, (k, v) in enumerate(attrs): + if idx: + p.text(",") + p.breakable() + p.text("{}=".format(k)) + p.pretty(vprint(k, v)) + + +def _is_output_redirected(): + return os.fstat(0) != os.fstat(1) diff --git a/Products/ZenCollector/configcache/cli/stats.py b/Products/ZenCollector/configcache/cli/stats.py new file mode 100644 index 0000000000..44be1aea36 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/stats.py @@ -0,0 +1,211 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
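# The pretty-printing hooks in show.py above use IPython.lib.pretty's
# for_type registration together with the printer protocol (p.group,
# p.text, p.breakable, p.pretty). A stripped-down demo of the same
# password-masking idea; FakeProxy and its attributes are invented and are
# not the real DeviceProxy.
from IPython.lib import pretty

class FakeProxy(object):
    def __init__(self):
        self.id = "abc-01"
        self.zCommandPassword = "hunter2"

def masked(obj, p, cycle):
    if cycle:
        p.text("<FakeProxy: ...>")
        return
    with p.group(2, "<FakeProxy: ", ">"):
        for idx, (k, v) in enumerate(sorted(obj.__dict__.items())):
            if idx:
                p.text(",")
                p.breakable()
            p.text("{}=".format(k))
            p.pretty("******" if "password" in k.lower() else v)

pretty.for_type(FakeProxy, masked)
pretty.pprint(FakeProxy(), max_width=40)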
+# +############################################################################## + +from __future__ import print_function, absolute_import, division + +import argparse +import sys +import time + +import attr + +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from ..app import initialize_environment +from ..app.args import get_subparser +from ..cache import DeviceQuery + +from .args import get_devargs_parser, MultiChoice +from ._tables import TablesOutput, _xform +from ._json import JSONOutput +from ._stats import ( + AverageAgeStat, + CountStat, + MaxAgeStat, + MedianAgeStat, + MinAgeStat, + UniqueCountStat, +) +from ._groups import DeviceGroup, ServiceGroup, MonitorGroup, StatusGroup + + +class StatsOidMap(object): + description = "Show the statistics of the oidmap configuration" + configs = (("store.zcml", __name__),) + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "stats", + description=StatsOidMap.description, + ) + subp.set_defaults(factory=StatsOidMap) + + def __init__(self, args): + pass + + def run(self): + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("oidmapcache-store", client) + record = store.get() + status = store.get_status() + if record is None and status is None: + print("No oidmap found in the cache.") + else: + now = time.time() + if record is not None: + age = now - record.created + print("Oidmap Age: {}".format(_xform(age, "timedelta"))) + else: + print("no oidmap") + if status is not None: + status_text = type(status).__name__ + print("Status: {}".format(status_text)) + ts = attr.astuple(status)[-1] + age = now - ts + print("Status Age: {}".format(_xform(age, "timedelta"))) + + +class StatsDevice(object): + description = "Show statistics about the device configurations" + configs = (("store.zcml", __name__),) + + _groups = ("collector", "device", "service", "status") + _statistics = ("count", "avg_age", "median_age", "min_age", "max_age") + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, + "stats", + StatsDevice.description, + parent=get_devargs_parser(), + ) + subp.add_argument( + "-S", + dest="statistic", + action=MultiChoice, + choices=StatsDevice._statistics, + default=argparse.SUPPRESS, + help="Specify the statistics to return. One or more statistics " + "may be specified (comma separated). By default, all " + "statistics are returned.", + ) + subp.add_argument( + "-G", + dest="group", + action=MultiChoice, + choices=StatsDevice._groups, + default=argparse.SUPPRESS, + help="Specify the statistics groupings to return. One or more " + "groupings may be specified (comma separated). 
By default, all " + "groupings are returned.", + ) + subp.add_argument( + "-f", + dest="format", + choices=("tables", "json"), + default="tables", + help="Output statistics in the specified format", + ) + subp.set_defaults(factory=StatsDevice) + + def __init__(self, args): + stats = [ + _name_stat_map.get(statId) + for statId in getattr(args, "statistic", StatsDevice._statistics) + ] + self._groups = [ + _make_statgroup(groupId, stats) + for groupId in getattr(args, "group", StatsDevice._groups) + ] + if args.format == "tables": + self._format = TablesOutput() + elif args.format == "json": + self._format = JSONOutput() + self._monitor = args.collector + self._service = args.service + self._devices = getattr(args, "device", []) + + def run(self): + haswildcard = any("*" in d for d in self._devices) + if haswildcard and len(self._devices) > 1: + print( + "Only one DEVICE argument supported when a wildcard is used.", + file=sys.stderr, + ) + return + initialize_environment(configs=self.configs, useZope=False) + client = getRedisClient(url=getRedisUrl()) + store = createObject("deviceconfigcache-store", client) + + if haswildcard: + query = DeviceQuery(self._service, self._monitor, self._devices[0]) + else: + query = DeviceQuery(self._service, self._monitor) + included = _get_device_predicate(self._devices, haswildcard) + for key, ts in store.query_updated(query): + if not included(key.device): + continue + for group in self._groups: + group.handle_key(key) + group.handle_timestamp(key, ts) + for status in store.query_statuses(query): + if not included(status.key.device): + continue + for group in self._groups: + group.handle_status(status) + + self._format.write( + *(group for group in sorted(self._groups, key=lambda x: x.order)) + ) + + +def _make_statgroup(groupId, stats): + if groupId == "collector": + return MonitorGroup(stats) + + if groupId == "device": + try: + # DeviceGroup doesn't want CountStat + posn = stats.index(CountStat) + except ValueError: + # Not found, so don't worry about it + dg_stats = stats + pass + else: + # Found, replace it with UniqueCountStat + dg_stats = list(stats) + dg_stats[posn] = UniqueCountStat + return DeviceGroup(dg_stats) + + if groupId == "service": + return ServiceGroup(stats) + + if groupId == "status": + return StatusGroup(stats) + + +_name_stat_map = { + "count": CountStat, + "avg_age": AverageAgeStat, + "median_age": MedianAgeStat, + "min_age": MinAgeStat, + "max_age": MaxAgeStat, +} + + +def _get_device_predicate(devices, haswildcard): + if haswildcard or len(devices) == 0: + return lambda x: True + return lambda x: next((True for d in devices if x == d), False) diff --git a/Products/ZenCollector/configcache/cli/store.zcml b/Products/ZenCollector/configcache/cli/store.zcml new file mode 100644 index 0000000000..8ec2993701 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/store.zcml @@ -0,0 +1,13 @@ + + + + + + + + diff --git a/Products/ZenCollector/configcache/cli/tests/__init__.py b/Products/ZenCollector/configcache/cli/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenCollector/configcache/cli/tests/test_expire.py b/Products/ZenCollector/configcache/cli/tests/test_expire.py new file mode 100644 index 0000000000..6b634a36b3 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/tests/test_expire.py @@ -0,0 +1,222 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
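# The age statistics reported by "configcache device stats" are presumably
# derived from the difference between "now" and each configuration's
# last-update timestamp. The real CountStat/AverageAgeStat/MedianAgeStat/
# MinAgeStat/MaxAgeStat classes live in ._stats and are not part of this
# hunk; the sketch below only illustrates the underlying arithmetic on a few
# invented timestamps.
from __future__ import print_function
import time

now = time.time()
updated = [now - 120, now - 3600, now - 86400]  # invented update times
ages = sorted(now - ts for ts in updated)

count = len(ages)
avg_age = sum(ages) / count
if count % 2:
    median_age = ages[count // 2]
else:
    median_age = (ages[count // 2 - 1] + ages[count // 2]) / 2.0
print(count, avg_age, median_age, min(ages), max(ages))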
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections + +from unittest import TestCase + +from Products.ZenCollector.services.config import DeviceProxy +from Products.Jobber.tests.utils import RedisLayer + +from ...cache import DeviceKey, DeviceRecord +from ...cache.storage import DeviceConfigStore +from ..expire import ExpireDevice + + +_fields = collections.namedtuple( + "_fields", "service monitor device uid updated" +) + +PATH = {"src": "Products.ZenCollector.configcache.cli.list"} + + +class _BaseTest(TestCase): + # Base class to share setup code + + layer = RedisLayer + + fields = ( + _fields("svc1", "mon1", "abc-01", "/abc-01", 1234500.0), + _fields("svc1", "mon1", "abc-02", "/abc-02", 1234550.0), + _fields("svc2", "mon1", "efg-01", "/efg-01", 1234550.0), + ) + + def setUp(t): + DeviceProxy.__eq__ = _compare_configs + t.store = DeviceConfigStore(t.layer.redis) + t.config1 = _make_config("abc-01", "_abc_01", "abef394c") + t.config2 = _make_config("abc-02", "_abc_02", "fbd987ba") + t.config3 = _make_config("efg-01", "_efg_01", "39da34cf") + t.record1 = DeviceRecord.make( + t.fields[0].service, + t.fields[0].monitor, + t.fields[0].device, + t.fields[0].uid, + t.fields[0].updated, + t.config1, + ) + t.record2 = DeviceRecord.make( + t.fields[1].service, + t.fields[1].monitor, + t.fields[1].device, + t.fields[1].uid, + t.fields[1].updated, + t.config2, + ) + t.record3 = DeviceRecord.make( + t.fields[2].service, + t.fields[2].monitor, + t.fields[2].device, + t.fields[2].uid, + t.fields[2].updated, + t.config3, + ) + t.store.add(t.record1) + t.store.add(t.record2) + t.store.add(t.record3) + + def tearDown(t): + del t.store + del t.config1 + del t.config2 + del t.config3 + del t.record1 + del t.record2 + del t.record3 + del DeviceProxy.__eq__ + + +_Args = collections.namedtuple("_Args", "service collector device") + + +class ExpireDeviceTest(_BaseTest): + """Test the ExpireDevice class.""" + + def test_no_args(t): + args = _Args("*", "*", []) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(3, len(keys)) + keys = sorted(keys) + for n in range(3): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_matched_device(t): + args = _Args("*", "*", ["abc*"]) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, True) + t.assertEqual(2, len(keys)) + keys = sorted(keys) + for n in range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_unmatched_device(t): + args = _Args("*", "*", ["abc"]) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_multiple_devices(t): + args = _Args("*", "*", ["abc-01", "abc-02"]) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(2, len(keys)) + keys = sorted(keys) + for n in range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_matched_service(t): + args = _Args("*1", "*", []) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(2, len(keys)) + keys = sorted(keys) + for n in 
range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_unmatched_service(t): + args = _Args("svc", "*", []) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_matched_monitor(t): + args = _Args("*", "*1", []) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(3, len(keys)) + keys = sorted(keys) + for n in range(3): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_unmatched_monitor(t): + args = _Args("*", "mon", []) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_nonoverlapping_service(t): + args = _Args("svc2", "*", ["abc-01", "abc-02"]) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_nonoverlapping_monitor(t): + args = _Args("*", "mon2", []) + cmd = ExpireDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + +def _make_config(_id, configId, guid): + config = DeviceProxy() + config.id = _id + config._config_id = configId + config._device_guid = guid + config.data = "fancy" + return config + + +def _compare_configs(self, cfg): + # _compare_configs used to monkeypatch DeviceProxy + # to make equivalent instances equal. + return all( + ( + self.id == cfg.id, + self._config_id == cfg._config_id, + self._device_guid == cfg._device_guid, + ) + ) diff --git a/Products/ZenCollector/configcache/cli/tests/test_list.py b/Products/ZenCollector/configcache/cli/tests/test_list.py new file mode 100644 index 0000000000..aa16e5c325 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/tests/test_list.py @@ -0,0 +1,280 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
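# Two conventions in the tests above are worth noting: argparse is bypassed
# entirely (a namedtuple with the right attribute names stands in for the
# parsed namespace), and DeviceProxy.__eq__ is monkeypatched in setUp so
# equivalent proxies compare equal by value instead of by identity. The
# second pattern, reduced to a generic, throwaway example:
class Widget(object):  # generic stand-in, not DeviceProxy
    def __init__(self, key):
        self.key = key

def _eq(self, other):
    return self.key == other.key

Widget.__eq__ = _eq                   # done in setUp
assert Widget("a") == Widget("a")     # value equality while patched
del Widget.__eq__                     # undone in tearDown
assert Widget("a") != Widget("a")     # back to identity comparison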
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections + +from unittest import TestCase + +from mock import patch + +from Products.ZenCollector.services.config import DeviceProxy +from Products.Jobber.tests.utils import RedisLayer + +from ...cache import DeviceKey, DeviceRecord, ConfigStatus +from ...cache.storage import DeviceConfigStore +from ..list import ListDevice + + +_fields = collections.namedtuple( + "_fields", "service monitor device uid updated" +) + +PATH = {"src": "Products.ZenCollector.configcache.cli.list"} + +_Args = collections.namedtuple( + "_Args", "service collector show_uid device states" +) + + +class _BaseTest(TestCase): + # Base class to share setup code + + layer = RedisLayer + + fields = ( + _fields("svc1", "mon1", "abc-01", "/abc-01", 1234500.0), + _fields("svc1", "mon1", "abc-02", "/abc-02", 1234550.0), + _fields("svc2", "mon1", "efg-01", "/efg-01", 1234550.0), + ) + + def setUp(t): + DeviceProxy.__eq__ = _compare_configs + t.store = DeviceConfigStore(t.layer.redis) + t.config1 = _make_config("abc-01", "_abc_01", "abef394c") + t.config2 = _make_config("abc-02", "_abc_02", "fbd987ba") + t.config3 = _make_config("efg-01", "_efg_01", "39da34cf") + t.record1 = DeviceRecord.make( + t.fields[0].service, + t.fields[0].monitor, + t.fields[0].device, + t.fields[0].uid, + t.fields[0].updated, + t.config1, + ) + t.record2 = DeviceRecord.make( + t.fields[1].service, + t.fields[1].monitor, + t.fields[1].device, + t.fields[1].uid, + t.fields[1].updated, + t.config2, + ) + t.record3 = DeviceRecord.make( + t.fields[2].service, + t.fields[2].monitor, + t.fields[2].device, + t.fields[2].uid, + t.fields[2].updated, + t.config3, + ) + t.store.add(t.record1) + t.store.add(t.record2) + t.store.add(t.record3) + + def tearDown(t): + del t.store + del t.config1 + del t.config2 + del t.config3 + del t.record1 + del t.record2 + del t.record3 + del DeviceProxy.__eq__ + + +class ListDeviceTest(_BaseTest): + """Test the ListDevice class.""" + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_no_args(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "*", False, [], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(3, len(statuses)) + statuses = sorted(statuses, key=lambda s: s.key) + t.assertTrue( + all(isinstance(st, ConfigStatus.Current) for st in statuses) + ) + for n in range(3): + key = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(statuses[n].key, key) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_matched_device(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "*", False, ["abc*"], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(True) + t.assertEqual(2, len(statuses)) + statuses = sorted(statuses, key=lambda s: s.key) + t.assertTrue( + all(isinstance(st, ConfigStatus.Current) for st in statuses) + ) + for n in range(2): + key = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(statuses[n].key, key) + t.assertDictEqual({}, oidmap) + + 
@patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_multiple_devices(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "*", False, ["abc-01", "abc-02"], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(2, len(statuses)) + statuses = sorted(statuses, key=lambda s: s.key) + for n in range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(statuses[n].key, expectedkey) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_unmatched_device(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "*", False, ["abc"], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(0, len(statuses)) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_matched_service(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*1", "*", False, [], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(2, len(statuses)) + statuses = sorted(statuses, key=lambda s: s.key) + for n in range(2): + key = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(statuses[n].key, key) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_unmatched_service(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("svc", "*", False, [], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(0, len(statuses)) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_matched_monitor(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "*1", False, [], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(3, len(statuses)) + statuses = sorted(statuses, key=lambda s: s.key) + for n in range(3): + key = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(statuses[n].key, key) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_unmatched_monitor(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "mon", False, [], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(0, len(statuses)) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_nonoverlapping_service(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + 
_createObject.return_value = t.store + args = _Args("svc2", "*", False, ["abc-01", "abc-02"], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(0, len(statuses)) + t.assertDictEqual({}, oidmap) + + @patch("{src}.createObject".format(**PATH), autospec=True) + @patch("{src}.getRedisClient".format(**PATH), autospec=True) + def test_nonoverlapping_monitor(t, _getRedisClient, _createObject): + _getRedisClient.return_value = t.layer.redis + _createObject.return_value = t.store + args = _Args("*", "mon2", False, [], []) + cmd = ListDevice(args) + + statuses, oidmap = cmd._get(False) + t.assertEqual(0, len(statuses)) + t.assertDictEqual({}, oidmap) + + +def _make_config(_id, configId, guid): + config = DeviceProxy() + config.id = _id + config._config_id = configId + config._device_guid = guid + config.data = "fancy" + return config + + +def _compare_configs(self, cfg): + # _compare_configs used to monkeypatch DeviceProxy + # to make equivalent instances equal. + return all( + ( + self.id == cfg.id, + self._config_id == cfg._config_id, + self._device_guid == cfg._device_guid, + ) + ) diff --git a/Products/ZenCollector/configcache/cli/tests/test_remove.py b/Products/ZenCollector/configcache/cli/tests/test_remove.py new file mode 100644 index 0000000000..ffac417672 --- /dev/null +++ b/Products/ZenCollector/configcache/cli/tests/test_remove.py @@ -0,0 +1,222 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections + +from unittest import TestCase + +from Products.ZenCollector.services.config import DeviceProxy +from Products.Jobber.tests.utils import RedisLayer + +from ...cache import DeviceKey, DeviceRecord +from ...cache.storage import DeviceConfigStore +from ..remove import RemoveDevice + + +_fields = collections.namedtuple( + "_fields", "service monitor device uid updated" +) + +PATH = {"src": "Products.ZenCollector.configcache.cli.list"} + + +class _BaseTest(TestCase): + # Base class to share setup code + + layer = RedisLayer + + fields = ( + _fields("svc1", "mon1", "abc-01", "/abc-01", 1234500.0), + _fields("svc1", "mon1", "abc-02", "/abc-02", 1234550.0), + _fields("svc2", "mon1", "efg-01", "/efg-01", 1234550.0), + ) + + def setUp(t): + DeviceProxy.__eq__ = _compare_configs + t.store = DeviceConfigStore(t.layer.redis) + t.config1 = _make_config("abc-01", "_abc_01", "abef394c") + t.config2 = _make_config("abc-02", "_abc_02", "fbd987ba") + t.config3 = _make_config("efg-01", "_efg_01", "39da34cf") + t.record1 = DeviceRecord.make( + t.fields[0].service, + t.fields[0].monitor, + t.fields[0].device, + t.fields[0].uid, + t.fields[0].updated, + t.config1, + ) + t.record2 = DeviceRecord.make( + t.fields[1].service, + t.fields[1].monitor, + t.fields[1].device, + t.fields[1].uid, + t.fields[1].updated, + t.config2, + ) + t.record3 = DeviceRecord.make( + t.fields[2].service, + t.fields[2].monitor, + t.fields[2].device, + t.fields[2].uid, + t.fields[2].updated, + t.config3, + ) + t.store.add(t.record1) + t.store.add(t.record2) + t.store.add(t.record3) + + def tearDown(t): + del t.store + del t.config1 + del t.config2 + del t.config3 + del t.record1 + del t.record2 + del t.record3 + del 
DeviceProxy.__eq__ + + +_Args = collections.namedtuple("_Args", "service collector device") + + +class RemoveDeviceTest(_BaseTest): + """Test the RemoveDevice class.""" + + def test_no_args(t): + args = _Args("*", "*", []) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(3, len(keys)) + keys = sorted(keys) + for n in range(3): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_matched_device(t): + args = _Args("*", "*", ["abc*"]) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, True) + t.assertEqual(2, len(keys)) + keys = sorted(keys) + for n in range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_unmatched_device(t): + args = _Args("*", "*", ["abc"]) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_multiple_devices(t): + args = _Args("*", "*", ["abc-01", "abc-02"]) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(2, len(keys)) + keys = sorted(keys) + for n in range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_matched_service(t): + args = _Args("*1", "*", []) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(2, len(keys)) + keys = sorted(keys) + for n in range(2): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_unmatched_service(t): + args = _Args("svc", "*", []) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_matched_monitor(t): + args = _Args("*", "*1", []) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(3, len(keys)) + keys = sorted(keys) + for n in range(3): + expectedkey = DeviceKey( + t.fields[n].service, + t.fields[n].monitor, + t.fields[n].device, + ) + t.assertEqual(keys[n], expectedkey) + + def test_unmatched_monitor(t): + args = _Args("*", "mon", []) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_nonoverlapping_service(t): + args = _Args("svc2", "*", ["abc-01", "abc-02"]) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + def test_nonoverlapping_monitor(t): + args = _Args("*", "mon2", []) + cmd = RemoveDevice(args) + + keys = cmd._get(t.store, False) + t.assertEqual(0, len(keys)) + + +def _make_config(_id, configId, guid): + config = DeviceProxy() + config.id = _id + config._config_id = configId + config._device_guid = guid + config.data = "fancy" + return config + + +def _compare_configs(self, cfg): + # _compare_configs used to monkeypatch DeviceProxy + # to make equivalent instances equal. + return all( + ( + self.id == cfg.id, + self._config_id == cfg._config_id, + self._device_guid == cfg._device_guid, + ) + ) diff --git a/Products/ZenCollector/configcache/cli/tests/test_show.py b/Products/ZenCollector/configcache/cli/tests/test_show.py new file mode 100644 index 0000000000..d6b118183a --- /dev/null +++ b/Products/ZenCollector/configcache/cli/tests/test_show.py @@ -0,0 +1,158 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections + +from unittest import TestCase + +from Products.ZenCollector.services.config import DeviceProxy +from Products.Jobber.tests.utils import RedisLayer + +from ...cache import DeviceRecord +from ...cache.storage import DeviceConfigStore +from ..show import _query_cache + + +_fields = collections.namedtuple( + "_fields", "service monitor device uid updated" +) + +PATH = {"src": "Products.ZenCollector.configcache.cli.list"} + + +class _BaseTest(TestCase): + # Base class to share setup code + + layer = RedisLayer + + fields = ( + _fields("svc1", "mon1", "abc-01", "/abc-01", 1234500.0), + _fields("svc1", "mon1", "abc-02", "/abc-02", 1234550.0), + _fields("svc2", "mon1", "efg-01", "/efg-01", 1234550.0), + ) + + def setUp(t): + DeviceProxy.__eq__ = _compare_configs + t.store = DeviceConfigStore(t.layer.redis) + t.config1 = _make_config("abc-01", "_abc_01", "abef394c") + t.config2 = _make_config("abc-02", "_abc_02", "fbd987ba") + t.config3 = _make_config("efg-01", "_efg_01", "39da34cf") + t.record1 = DeviceRecord.make( + t.fields[0].service, + t.fields[0].monitor, + t.fields[0].device, + t.fields[0].uid, + t.fields[0].updated, + t.config1, + ) + t.record2 = DeviceRecord.make( + t.fields[1].service, + t.fields[1].monitor, + t.fields[1].device, + t.fields[1].uid, + t.fields[1].updated, + t.config2, + ) + t.record3 = DeviceRecord.make( + t.fields[2].service, + t.fields[2].monitor, + t.fields[2].device, + t.fields[2].uid, + t.fields[2].updated, + t.config3, + ) + t.store.add(t.record1) + t.store.add(t.record2) + t.store.add(t.record3) + + def tearDown(t): + del t.store + del t.config1 + del t.config2 + del t.config3 + del t.record1 + del t.record2 + del t.record3 + del DeviceProxy.__eq__ + + +class QueryCacheTest(_BaseTest): + """Test the _query_cache function.""" + + def test_unmatched_service(t): + svc, mon, dvc = ("1", "mon1", "abc-01") + + results, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNone(results) + t.assertIsNotNone(err) + + def test_unmatched_monitor(t): + svc, mon, dvc = ("svc1", "1", "abc-01") + + results, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNone(results) + t.assertIsNotNone(err) + + def test_unmatched_device(t): + svc, mon, dvc = ("svc1", "mon1", "abc") + + results, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNone(results) + t.assertIsNotNone(err) + + def test_multiple_devices(t): + svc, mon, dvc = ("svc1", "mon1", "abc*") + + results, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNone(results) + t.assertIsNotNone(err) + + def test_matching_service(t): + svc, mon, dvc = ("*1", "mon1", "abc-01") + + record, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNotNone(record) + t.assertIsNone(err) + + def test_matching_monitor(t): + svc, mon, dvc = ("svc1", "*1", "abc-01") + + record, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNotNone(record) + t.assertIsNone(err) + + def test_matching_device(t): + svc, mon, dvc = ("svc1", "mon1", "a*1") + + record, err = _query_cache(t.store, svc, mon, dvc) + t.assertIsNotNone(record) + t.assertIsNone(err) + + +def _make_config(_id, configId, guid): + config = DeviceProxy() + config.id = _id + config._config_id = configId + config._device_guid = guid + config.data = "fancy" + return 
config + + +def _compare_configs(self, cfg): + # _compare_configs used to monkeypatch DeviceProxy + # to make equivalent instances equal. + return all( + ( + self.id == cfg.id, + self._config_id == cfg._config_id, + self._device_guid == cfg._device_guid, + ) + ) diff --git a/Products/ZenCollector/configcache/configcache.py b/Products/ZenCollector/configcache/configcache.py new file mode 100644 index 0000000000..49aff85161 --- /dev/null +++ b/Products/ZenCollector/configcache/configcache.py @@ -0,0 +1,31 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +from .app.args import get_arg_parser +from .cli import OidMap, Device +from .invalidator import Invalidator +from .manager import Manager +from .version import Version + + +def main(argv=None): + parser = get_arg_parser("configcache commands") + + subparsers = parser.add_subparsers(title="Commands") + + Version.add_arguments(parser, subparsers) + Manager.add_arguments(parser, subparsers) + Invalidator.add_arguments(parser, subparsers) + OidMap.add_arguments(parser, subparsers) + Device.add_arguments(parser, subparsers) + + args = parser.parse_args() + args.factory(args).run() diff --git a/Products/ZenCollector/configcache/configure.zcml b/Products/ZenCollector/configcache/configure.zcml new file mode 100644 index 0000000000..394852439e --- /dev/null +++ b/Products/ZenCollector/configcache/configure.zcml @@ -0,0 +1,13 @@ + + + + + + + + diff --git a/Products/ZenCollector/configcache/constants.py b/Products/ZenCollector/configcache/constants.py new file mode 100644 index 0000000000..1a21a2c5d0 --- /dev/null +++ b/Products/ZenCollector/configcache/constants.py @@ -0,0 +1,31 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + + +class Constants(object): + + device_build_timeout_id = "zDeviceConfigBuildTimeout" + device_pending_timeout_id = "zDeviceConfigPendingTimeout" + device_time_to_live_id = "zDeviceConfigTTL" + device_minimum_time_to_live_id = "zDeviceConfigMinimumTTL" + + # Default values + device_build_timeout_value = 7200 + device_pending_timeout_value = 7200 + device_time_to_live_value = 43200 + device_minimum_time_to_live_value = 0 + + oidmap_build_timeout_id = "configcache-oidmap-build-timeout" + oidmap_pending_timeout_id = "configcache-oidmap-pending-timeout" + oidmap_time_to_live_id = "configcache-oidmap-ttl" + + # Default values + oidmap_build_timeout_value = 7200 + oidmap_pending_timeout_value = 7200 + oidmap_time_to_live_value = 43200 diff --git a/Products/ZenCollector/configcache/debug.py b/Products/ZenCollector/configcache/debug.py new file mode 100644 index 0000000000..b6b354f72f --- /dev/null +++ b/Products/ZenCollector/configcache/debug.py @@ -0,0 +1,45 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. 
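# configcache.py above wires every subcommand through the same convention:
# each command class registers a subparser, stores itself as "factory" via
# set_defaults, and main() finishes with args.factory(args).run(). A
# self-contained miniature of that pattern; the "hello" command is invented
# purely for illustration.
from __future__ import print_function
import argparse

class Hello(object):
    @staticmethod
    def add_arguments(parser, subparsers):
        subp = subparsers.add_parser("hello", description="say hello")
        subp.add_argument("name")
        subp.set_defaults(factory=Hello)

    def __init__(self, args):
        self._name = args.name

    def run(self):
        print("hello, {}".format(self._name))

parser = argparse.ArgumentParser(description="configcache-like CLI sketch")
subparsers = parser.add_subparsers(title="Commands")
Hello.add_arguments(parser, subparsers)

args = parser.parse_args(["hello", "world"])
args.factory(args).run()  # prints "hello, world"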
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import os +import signal +import sys + +from .app import pidfile + + +class Debug(object): + @classmethod + def from_args(cls, args): + return cls(args.pidfile) + + def __init__(self, pidfile): + self.pidfile = pidfile + + def run(self): + pf = pidfile({"pidfile": self.pidfile}) + try: + pid = pf.read() + try: + os.kill(pid, signal.SIGUSR1) + print( + "Signaled {} to toggle debug mode".format( + self.pidfile.split(".")[0] + .split("/")[-1] + .replace("-", " ") + ) + ) + except OSError as ex: + print("{} ({})".format(ex, pid), file=sys.stderr) + sys.exit(1) + except IOError as ex: + print("{}".format(ex), file=sys.stderr) + sys.exit(1) diff --git a/Products/ZenCollector/configcache/dispatcher.py b/Products/ZenCollector/configcache/dispatcher.py new file mode 100644 index 0000000000..2393b12596 --- /dev/null +++ b/Products/ZenCollector/configcache/dispatcher.py @@ -0,0 +1,83 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .tasks import build_device_config, build_oidmap + + +class DeviceConfigTaskDispatcher(object): + """Encapsulates the act of dispatching the build_device_config task.""" + + def __init__(self, configClasses): + """ + Initialize a DeviceConfigTaskDispatcher instance. + + The `configClasses` parameter should be the classes used to create + the device configurations. + + @type configClasses: Sequence[Class] + """ + self._classnames = { + cls.__module__: ".".join((cls.__module__, cls.__name__)) + for cls in configClasses + } + + @property + def service_names(self): + return self._classnames.keys() + + def dispatch_all(self, monitorid, deviceid, timeout, submitted): + """ + Submit a task to build a device configuration from each + configuration service. + """ + soft_limit, hard_limit = _get_limits(timeout) + for name in self._classnames.values(): + build_device_config.apply_async( + args=(monitorid, deviceid, name), + kwargs={"submitted": submitted}, + soft_time_limit=soft_limit, + time_limit=hard_limit, + ) + + def dispatch(self, servicename, monitorid, deviceid, timeout, submitted): + """ + Submit a task to build device configurations for the specified device. 
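+        The timeout is applied as the Celery soft time limit; the hard
+        limit adds a ten percent margin (see _get_limits below).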
+ + @type servicename: str + @type monitorid: str + @type deviceId: str + """ + name = self._classnames.get(servicename) + if name is None: + raise ValueError("service name '%s' not found" % servicename) + soft_limit, hard_limit = _get_limits(timeout) + build_device_config.apply_async( + args=(monitorid, deviceid, name), + kwargs={"submitted": submitted}, + soft_time_limit=soft_limit, + time_limit=hard_limit, + ) + + +class OidMapTaskDispatcher(object): + """Encapsulates the act of dispatching the build_oidmap_config task.""" + + def dispatch(self, timeout, submitted): + soft_limit, hard_limit = _get_limits(timeout) + build_oidmap.apply_async( + kwargs={"submitted": submitted}, + soft_time_limit=soft_limit, + time_limit=hard_limit, + ) + + +def _get_limits(timeout): + return timeout, (timeout + (timeout * 0.1)) diff --git a/Products/ZenCollector/configcache/handlers.py b/Products/ZenCollector/configcache/handlers.py new file mode 100644 index 0000000000..c7087f381a --- /dev/null +++ b/Products/ZenCollector/configcache/handlers.py @@ -0,0 +1,171 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function, absolute_import + +import time + +from .cache import DeviceKey, DeviceQuery, ConfigStatus + + +class NewDeviceHandler(object): + def __init__(self, log, store, dispatcher): + self.log = log + self.store = store + self.dispatcher = dispatcher + + def __call__(self, deviceId, monitor, buildlimit, newDevice=True): + all_keys = { + DeviceKey(svcname, monitor, deviceId) + for svcname in self.dispatcher.service_names + } + query = DeviceQuery(device=deviceId, monitor=monitor) + pending_keys = { + status.key + for status in self.store.query_statuses(query) + if isinstance(status, ConfigStatus.Pending) + } + non_pending_keys = all_keys - pending_keys + for key in pending_keys: + self.log.info( + "build job already submitted for this config " + "device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) + now = time.time() + self.store.set_pending(*((key, now) for key in non_pending_keys)) + for key in non_pending_keys: + self.dispatcher.dispatch( + key.service, key.monitor, key.device, buildlimit, now + ) + self.log.info( + "submitted build job for %s " + "device=%s collector=%s service=%s", + "new device" if newDevice else "device with new device class", + key.device, + key.monitor, + key.service, + ) + + +class DeviceUpdateHandler(object): + def __init__(self, log, store, dispatcher): + self.log = log + self.store = store + self.dispatcher = dispatcher + + def __call__(self, keys, minttl): + statuses = tuple( + status + for status in (self.store.get_status(key) for key in keys) + if not isinstance( + status, + ( + # These statuses won't get 'stuck' in a wait period + # before manager handles them. + ConfigStatus.Expired, + ConfigStatus.Retired, + ), + ) + ) + + now = time.time() + retirement = now - minttl + + # Transitioning to Retired is relevant only for Current. 
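+        # (a Current config updated within the last `minttl` seconds is
+        # retired; anything else in `statuses` is expired immediately)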
+ retired = { + status.key + for status in statuses + if isinstance(status, ConfigStatus.Current) + and status.updated >= retirement + } + expired = { + status.key for status in statuses if status.key not in retired + } + + self.store.set_retired(*((key, now) for key in retired)) + self.store.set_expired(*((key, now) for key in expired)) + + for key in retired: + self.log.info( + "retired configuration of changed device " + "device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) + for key in expired: + self.log.info( + "expired configuration of changed device " + "device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) + + +class MissingConfigsHandler(object): + def __init__(self, log, store, dispatcher): + self.log = log + self.store = store + self.dispatcher = dispatcher + + def __call__(self, deviceId, monitor, keys, buildlimit): + """ + @param keys: These keys are associated with a config + @type keys: Sequence[DeviceKey] + """ + # Send a job for for all config services that don't currently have + # an associated configuration. Some ZenPacks, i.e. vSphere, defer + # their modeling to a later time, so jobs for configuration services + # must be sent to pick up any new configs. + hasconfigs = tuple(key.service for key in keys) + noconfigkeys = tuple( + DeviceKey(svcname, monitor, deviceId) + for svcname in self.dispatcher.service_names + if svcname not in hasconfigs + ) + # Identify all no-config keys that already have a status. + skipkeys = tuple( + key + for key in noconfigkeys + if self.store.get_status(key) is not None + ) + now = time.time() + for key in (k for k in noconfigkeys if k not in skipkeys): + self.store.set_pending((key, now)) + self.dispatcher.dispatch( + key.service, key.monitor, key.device, buildlimit, now + ) + self.log.debug( + "submitted build job for possibly missing config " + "device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) + + +class RemoveConfigsHandler(object): + def __init__(self, log, store): + self.log = log + self.store = store + + def __call__(self, keys): + self.store.remove(*keys) + for key in keys: + self.log.info( + "removed configuration of deleted device " + "device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) diff --git a/Products/ZenCollector/configcache/invalidator.py b/Products/ZenCollector/configcache/invalidator.py new file mode 100644 index 0000000000..a2dabf7495 --- /dev/null +++ b/Products/ZenCollector/configcache/invalidator.py @@ -0,0 +1,374 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
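# The retire-versus-expire decision in DeviceUpdateHandler above comes down
# to a single timestamp comparison against (now - minttl). A small worked
# example with invented numbers:
from __future__ import print_function
import time

def classify(updated, minttl, now):
    # mirrors the cutoff used above: recently updated configs are retired,
    # older ones are expired right away
    retirement = now - minttl
    return "retired" if updated >= retirement else "expired"

now = time.time()
print(classify(updated=now - 300, minttl=900, now=now))   # -> retired
print(classify(updated=now - 3600, minttl=900, now=now))  # -> expired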
+# +############################################################################## + +from __future__ import print_function, absolute_import + +import logging +import time + +from multiprocessing import Process + +from metrology.instruments import Gauge, HistogramExponentiallyDecaying + +from zenoss.modelindex import constants +from zope.component import createObject + +import Products.ZenCollector.configcache as CONFIGCACHE_MODULE + +from Products.ZenModel.Device import Device +from Products.ZenModel.MibModule import MibModule +from Products.ZenModel.MibNode import MibNode +from Products.ZenModel.MibNotification import MibNotification +from Products.ZenModel.MibOrganizer import MibOrganizer +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl +from Products.Zuul.catalog.interfaces import IModelCatalogTool + +from .app import Application +from .app.args import get_subparser +from .cache import ConfigStatus, DeviceQuery +from .debug import Debug as DebugCommand +from .dispatcher import DeviceConfigTaskDispatcher, OidMapTaskDispatcher +from .handlers import ( + NewDeviceHandler, + DeviceUpdateHandler, + MissingConfigsHandler, + RemoveConfigsHandler, +) +from .modelchange import InvalidationCause +from .utils import ( + DeviceProperties, + getDeviceConfigServices, + OidMapProperties, + RelStorageInvalidationPoller, +) + +_default_interval = 30.0 + + +class Invalidator(object): + description = ( + "Analyzes changes in ZODB to determine whether to update " + "device configurations" + ) + + configs = (("modelchange.zcml", CONFIGCACHE_MODULE),) + + metric_prefix = "configcache.invalidations." + + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, "invalidator", description=Invalidator.description + ) + subsubparsers = subp.add_subparsers(title="Invalidator Commands") + + subp_run = get_subparser( + subsubparsers, "run", description="Run the invalidator service" + ) + Application.add_all_arguments(subp_run) + subp_run.add_argument( + "--poll-interval", + default=_default_interval, + type=float, + help="Invalidation polling interval (in seconds)", + ) + subp_run.set_defaults( + factory=Application.from_args, + parser=subp_run, + task=Invalidator, + ) + + subp_debug = get_subparser( + subsubparsers, + "debug", + description=( + "Signal the invalidator service to toggle debug logging" + ), + ) + Application.add_pidfile_arguments(subp_debug) + subp_debug.set_defaults(factory=DebugCommand.from_args) + + Application.add_genconf_command(subsubparsers, (subp_run, subp_debug)) + + def __init__(self, config, context): + self.log = logging.getLogger("zen.configcache.invalidator") + self.ctx = context + + deviceconfigClasses = getDeviceConfigServices() + for cls in deviceconfigClasses: + self.log.info( + "using service class %s.%s", cls.__module__, cls.__name__ + ) + self.dispatchers = type( + "Dispatchers", + (object,), + { + "__slots__": ("device", "oidmap"), + "device": DeviceConfigTaskDispatcher(deviceconfigClasses), + "oidmap": OidMapTaskDispatcher(), + }, + )() + + client = getRedisClient(url=getRedisUrl()) + self.stores = type( + "Stores", + (object,), + { + "__slots__": ("device", "oidmap"), + "device": createObject("deviceconfigcache-store", client), + "oidmap": createObject("oidmapcache-store", client), + }, + )() + + self._process = _InvalidationProcessor( + self.log, self.stores, self.dispatchers + ) + + self.interval = config["poll-interval"] + + # metrics + self.ctx.metric_reporter.add_tags({"zenoss_daemon": "invalidator"}) + 
self._metrics = _Metrics(self.ctx.metric_reporter) + + def run(self): + # Handle changes that occurred when Invalidator wasn't running. + self._synchronize() + + poller = RelStorageInvalidationPoller( + self.ctx.db.storage, self.ctx.dmd + ) + self.log.info( + "polling for device changes every %s seconds", self.interval + ) + while not self.ctx.controller.shutdown: + try: + self.ctx.session.sync() + invalidations = poller.poll() + + self._metrics.received.mark(len(invalidations)) + self._metrics.processed.update(len(invalidations)) + + if not invalidations: + continue + + self._process_invalidations(invalidations) + finally: + self.ctx.metric_reporter.save() + # Call cacheGC to aggressively trim the ZODB cache + self.ctx.session.cacheGC() + self.ctx.controller.wait(self.interval) + + def _synchronize(self): + sync_deviceconfigs = Process( + target=_synchronize_deviceconfig_cache, + args=(self.log, self.ctx.dmd, self.dispatchers.device), + ) + sync_oidmaps = Process( + target=_synchronize_oidmap_cache, + args=(self.log, self.ctx.dmd, self.dispatchers.oidmap), + ) + sync_deviceconfigs.start() + sync_oidmaps.start() + sync_deviceconfigs.join() # blocks until subprocess has exited + sync_oidmaps.join() # blocks until subprocess has exited + + def _process_invalidations(self, invalidations): + self.log.debug("found %d relevant invalidations", len(invalidations)) + for inv in invalidations: + try: + self._process(inv.entity, inv.oid, inv.reason) + except Exception: + self.log.exception( + "failed to process invalidation entity=%s", + inv.entity, + ) + + +class InvalidationGauge(Gauge): + def __init__(self): + self._value = 0 + + @property + def value(self): + return self._value + + def mark(self, value): + self._value = value + + +def _synchronize_oidmap_cache(log, dmd, dispatcher): + store = createObject( + "oidmapcache-store", getRedisClient(url=getRedisUrl()) + ) + if store: + return + now = time.time() + store.set_pending(now) + buildlimit = OidMapProperties().build_timeout + dispatcher.dispatch(buildlimit, now) + log.info("submitted build job for oidmap") + + +_deviceconfig_solr_fields = ("id", "collector", "uid") + + +def _synchronize_deviceconfig_cache(log, dmd, dispatcher): + store = createObject( + "deviceconfigcache-store", getRedisClient(url=getRedisUrl()) + ) + tool = IModelCatalogTool(dmd) + catalog_results = tool.cursor_search( + types=("Products.ZenModel.Device.Device",), + limit=constants.DEFAULT_SEARCH_LIMIT, + fields=_deviceconfig_solr_fields, + ).results + devices = { + (brain.id, brain.collector): brain.uid + for brain in catalog_results + if brain.collector is not None + } + _removeDeleted(log, store, devices) + _addNewOrChangedDevices(log, store, dispatcher, dmd, devices) + + +def _removeDeleted(log, store, devices): + """ + Remove deleted devices from the cache. + + @param devices: devices that currently exist + @type devices: Mapping[Sequence[str, str], str] + """ + devices_not_found = tuple( + key + for key in store.search() + if (key.device, key.monitor) not in devices + ) + if devices_not_found: + RemoveConfigsHandler(log, store)(devices_not_found) + else: + log.info("no dangling configurations found") + + +def _addNewOrChangedDevices(log, store, dispatcher, dmd, devices): + # Add new devices to the config and metadata store. + # Also look for device that have changed their device class. 
+ # Query the catalog for all devices + new_devices = 0 + changed_devices = 0 + handle = NewDeviceHandler(log, store, dispatcher) + for (deviceId, monitorId), uid in devices.iteritems(): + try: + device = dmd.unrestrictedTraverse(uid) + except Exception as ex: + log.warning( + "failed to get device error-type=%s error=%s uid=%s", + type(ex), + ex, + uid, + ) + continue + timeout = DeviceProperties(device).build_timeout + keys_with_configs = tuple( + store.search(DeviceQuery(monitor=monitorId, device=deviceId)) + ) + uid = device.getPrimaryId() + if not keys_with_configs: + handle(deviceId, monitorId, timeout) + new_devices += 1 + else: + current_uid = store.get_uid(deviceId) + # A device with a changed device class will have a different uid. + if current_uid != uid: + handle(deviceId, monitorId, timeout, False) + changed_devices += 1 + if new_devices == 0: + log.info("no missing configurations found") + if changed_devices == 0: + log.info("no devices with a different device class found") + + +class _InvalidationProcessor(object): + def __init__(self, log, stores, dispatchers): + self.log = log + self.stores = stores + self._remove = RemoveConfigsHandler(log, stores.device) + self._update = DeviceUpdateHandler( + log, stores.device, dispatchers.device + ) + self._missing = MissingConfigsHandler( + log, stores.device, dispatchers.device + ) + self._new = NewDeviceHandler(log, stores.device, dispatchers.device) + + def __call__(self, obj, oid, reason): + if isinstance(obj, Device): + self._handle_device(obj, oid, reason) + elif isinstance( + obj, (MibOrganizer, MibModule, MibNode, MibNotification) + ): + self._handle_mib(obj, oid, reason) + + def _handle_device(self, device, oid, reason): + uid = device.getPrimaryId() + self.log.info("handling device %s", uid) + devprops = DeviceProperties(device) + buildlimit = devprops.build_timeout + minttl = devprops.minimum_ttl + monitor = device.getPerformanceServerName() + if monitor is None: + self.log.warn( + "ignoring invalidated device having undefined collector " + "device=%s reason=%s", + device, + reason, + ) + return + keys_with_config = tuple( + self.stores.device.search( + DeviceQuery(monitor=monitor, device=device.id) + ) + ) + if not keys_with_config: + self._new(device.id, monitor, buildlimit) + elif reason is InvalidationCause.Updated: + # Check for device class change + stored_uid = self.stores.device.get_uid(device.id) + if uid != stored_uid: + self._new(device.id, monitor, buildlimit, False) + else: + self._update(keys_with_config, minttl) + self._missing(device.id, monitor, keys_with_config, buildlimit) + elif reason is InvalidationCause.Removed: + self._remove(keys_with_config) + else: + self.log.warn( + "ignored unexpected reason " + "reason=%s device=%s collector=%s device-oid=%r", + reason, + device, + monitor, + oid, + ) + + def _handle_mib(self, obj, oid, reason): + status = self.stores.oidmap.get_status() + if isinstance(status, (ConfigStatus.Expired, ConfigStatus.Pending)): + # Status is already updated, so do nothing. 
+ return + now = time.time() + self.stores.oidmap.set_expired(now) + self.log.info("expired oidmap") + + +class _Metrics(object): + def __init__(self, reporter): + self.received = InvalidationGauge() + self.processed = HistogramExponentiallyDecaying() + reporter.register("received", self.received) + reporter.register("processed", self.processed) diff --git a/Products/ZenCollector/configcache/manager.py b/Products/ZenCollector/configcache/manager.py new file mode 100644 index 0000000000..71e3ca7825 --- /dev/null +++ b/Products/ZenCollector/configcache/manager.py @@ -0,0 +1,462 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function + +import logging + +from collections import Counter, defaultdict +from datetime import datetime +from time import time + +import attr + +from metrology.instruments import Gauge, HistogramExponentiallyDecaying +from metrology.utils.periodic import PeriodicTask +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from .app import Application +from .app.args import get_subparser +from .cache import ConfigStatus +from .constants import Constants +from .debug import Debug as DebugCommand +from .dispatcher import DeviceConfigTaskDispatcher, OidMapTaskDispatcher +from .propertymap import DevicePropertyMap +from .utils import getDeviceConfigServices, OidMapProperties + +_default_interval = 30.0 # seconds + + +class Manager(object): + description = ( + "Determines whether device configs are old and regenerates them" + ) + + metric_prefix = "configcache.status." 
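Both daemons feed Metrology instruments to the shared metric reporter in the same way; a minimal sketch of that flow, reusing InvalidationGauge from the invalidator module above (the reporter object and the value are stand-ins for illustration):

gauge = InvalidationGauge()                   # snapshot-style instrument
histogram = HistogramExponentiallyDecaying()  # decaying sample of recent values

reporter.register("received", gauge)
reporter.register("processed", histogram)

gauge.mark(12)        # record the latest batch size; gauge.value now reads 12
histogram.update(12)  # fold the same figure into the sample
reporter.save()       # flushed once per cycle by the run loops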
+ + @staticmethod + def add_arguments(parser, subparsers): + subp = get_subparser( + subparsers, "manager", description=Manager.description + ) + subsubparsers = subp.add_subparsers(title="Manager Commands") + + subp_run = get_subparser( + subsubparsers, "run", description="Run the manager service" + ) + Application.add_all_arguments(subp_run) + subp_run.add_argument( + "--check-interval", + default=_default_interval, + type=float, + help="Config checking interval (in seconds)", + ) + subp_run.set_defaults( + factory=Application.from_args, + parser=subp_run, + task=Manager, + ) + + subp_debug = get_subparser( + subsubparsers, + "debug", + description="Signal the manager service to toggle debug logging", + ) + Application.add_pidfile_arguments(subp_debug) + subp_debug.set_defaults(factory=DebugCommand.from_args) + + Application.add_genconf_command(subsubparsers, (subp_run, subp_debug)) + + def __init__(self, config, context): + self.ctx = context + configClasses = getDeviceConfigServices() + self.dispatchers = type( + "Dispatchers", + (object,), + { + "device": DeviceConfigTaskDispatcher(configClasses), + "oidmap": OidMapTaskDispatcher(), + }, + )() + client = getRedisClient(url=getRedisUrl()) + self.stores = type( + "Stores", + (object,), + { + "device": createObject("deviceconfigcache-store", client), + "oidmap": createObject("oidmapcache-store", client), + }, + )() + self.interval = config["check-interval"] + self.log = logging.getLogger("zen.configcache.manager") + + # metrics + self.ctx.metric_reporter.add_tags({"zenoss_daemon": "manager"}) + self._metric_collector = _MetricCollector(self.ctx.metric_reporter) + + def run(self): + self.log.info( + "checking for expired configurations and configuration build " + "timeouts every %s seconds", + self.interval, + ) + try: + self._metric_collector.start() + self._main() + finally: + self._metric_collector.stop() + self._metric_collector.join(timeout=5) + + def _main(self): + while not self.ctx.controller.shutdown: + try: + self.ctx.session.sync() + self._expire_timedout_builds() + self._expire_timedout_pending() + self._rebuild_configs() + except Exception as ex: + self.log.exception("unexpected error %s", ex) + finally: + # Call cacheGC to aggressively trim the ZODB cache + self.ctx.session.cacheGC() + self.ctx.controller.wait(self.interval) + + def _expire_timedout_builds(self): + timedout = tuple(self._get_device_build_timeouts()) + if not timedout: + self.log.debug("no device configuration builds have timed out") + else: + self._expire_device_configs(timedout, "build") + timedout = self._get_oidmap_build_timeout() + if not timedout: + self.log.debug("no oidmap configuration build has timed out") + else: + self._expire_oidmap_config(timedout, "build") + + def _expire_timedout_pending(self): + timedout = tuple(self._get_device_pending_timeouts()) + if not timedout: + self.log.debug( + "no pending device configuration builds have timed out" + ) + else: + self._expire_device_configs(timedout, "pending") + timedout = self._get_oidmap_pending_timeout() + if not timedout: + self.log.debug( + "no pending oidmap configuration build has timed out" + ) + else: + self._expire_oidmap_config(timedout, "pending") + + def _rebuild_configs(self): + statuses = self._get_device_configs_to_rebuild() + if statuses: + self._rebuild_device_configs(statuses) + self._maybe_rebuild_oidmap_config() + + def _get_device_build_timeouts(self): + buildlimitmap = DevicePropertyMap.make_build_timeout_map( + self.ctx.dmd.Devices + ) + # Test against a time 10 minutes 
earlier to minimize interfering + # with builder working on the same config. + now = time() - 600 + for status in self.stores.device.get_building(): + uid = self.stores.device.get_uid(status.key.device) + limit = buildlimitmap.get(uid) + if status.started < (now - limit): + yield ( + status, + "started", + status.started, + Constants.device_build_timeout_id, + limit, + ) + + def _get_oidmap_build_timeout(self): + status = self.stores.oidmap.get_status() + if not isinstance(status, ConfigStatus.Building): + return None + limit = OidMapProperties().build_timeout + now = time() - 600 + if status.started < (now - limit): + return ( + status, + "started", + status.started, + Constants.oidmap_build_timeout_id, + limit, + ) + + def _get_device_pending_timeouts(self): + pendinglimitmap = DevicePropertyMap.make_pending_timeout_map( + self.ctx.dmd.Devices + ) + now = time() + for status in self.stores.device.get_pending(): + uid = self.stores.device.get_uid(status.key.device) + limit = pendinglimitmap.get(uid) + if status.submitted < (now - limit): + yield ( + status, + "submitted", + status.submitted, + Constants.device_pending_timeout_id, + limit, + ) + + def _get_oidmap_pending_timeout(self): + status = self.stores.oidmap.get_status() + if not isinstance(status, ConfigStatus.Pending): + return None + limit = OidMapProperties().pending_timeout + now = time() + if status.submitted < (now - limit): + return ( + status, + "submitted", + status.submitted, + Constants.oidmap_pending_timeout_id, + limit, + ) + + def _expire_device_configs(self, data, kind): + now = time() + self.stores.device.set_expired( + *((status.key, now) for status, _, _, _, _ in data) + ) + for status, valId, val, limitId, limitValue in data: + self.log.info( + "expired device configuration due to %s timeout " + "%s=%s %s=%s service=%s collector=%s device=%s", + kind, + valId, + datetime.fromtimestamp(val).strftime("%Y-%m-%d %H:%M:%S"), + limitId, + limitValue, + status.key.service, + status.key.monitor, + status.key.device, + ) + + def _expire_oidmap_config(self, data, kind): + now = time() + self.stores.oidmap.set_expired(now) + status, valId, val, limitId, limitValue = data + self.log.info( + "expired oidmap configuration due to %s timeout %s=%s %s=%s", + kind, + valId, + datetime.fromtimestamp(val).strftime("%Y-%m-%d %H:%M:%S"), + limitId, + limitValue, + ) + + def _get_device_configs_to_rebuild(self): + minttl_map = DevicePropertyMap.make_minimum_ttl_map( + self.ctx.dmd.Devices + ) + ttl_map = DevicePropertyMap.make_ttl_map(self.ctx.dmd.Devices) + now = time() + + ready_to_rebuild = [] + + # Retrieve the 'retired' configs + for status in self.stores.device.get_retired(): + built = self.stores.device.get_updated(status.key) + uid = self.stores.device.get_uid(status.key.device) + if built is None or built < now - minttl_map.get(uid): + ready_to_rebuild.append(status) + + # Append the 'expired' configs + ready_to_rebuild.extend(self.stores.device.get_expired()) + + # Append the 'older' configs. 
+ min_age = now - ttl_map.smallest_value() + for status in self.stores.device.get_older(min_age): + # Select the min ttl if the ttl is a smaller value + uid = self.stores.device.get_uid(status.key.device) + limit = max(minttl_map.get(uid), ttl_map.get(uid)) + expiration_threshold = now - limit + if status.updated <= expiration_threshold: + ready_to_rebuild.append(status) + + return ready_to_rebuild + + def _rebuild_device_configs(self, statuses): + buildlimitmap = DevicePropertyMap.make_build_timeout_map( + self.ctx.dmd.Devices + ) + count = 0 + for status in statuses: + uid = self.stores.device.get_uid(status.key.device) + timeout = buildlimitmap.get(uid) + now = time() + self.stores.device.set_pending((status.key, now)) + self.dispatchers.device.dispatch( + status.key.service, + status.key.monitor, + status.key.device, + timeout, + now, + ) + if isinstance(status, ConfigStatus.Expired): + self.log.info( + "submitted job to rebuild expired config " + "service=%s collector=%s device=%s", + status.key.service, + status.key.monitor, + status.key.device, + ) + elif isinstance(status, ConfigStatus.Retired): + self.log.info( + "submitted job to rebuild retired config " + "service=%s collector=%s device=%s", + status.key.service, + status.key.monitor, + status.key.device, + ) + else: + self.log.info( + "submitted job to rebuild old config " + "updated=%s %s=%s service=%s collector=%s device=%s", + datetime.fromtimestamp(status.updated).strftime( + "%Y-%m-%d %H:%M:%S" + ), + Constants.device_build_timeout_id, + timeout, + status.key.service, + status.key.monitor, + status.key.device, + ) + count += 1 + if count == 0: + self.log.debug("found no expired or old configurations to rebuild") + + def _maybe_rebuild_oidmap_config(self): + props = OidMapProperties() + status = self.stores.oidmap.get_status() + now = time() + if isinstance(status, ConfigStatus.Current): + if now < (status.updated + props.ttl): + return # oidmap not old enough for rebuild + elif isinstance(status, (ConfigStatus.Pending, ConfigStatus.Building)): + return # wrong status for an automatic rebuild + created = self.stores.oidmap.get_created() + if created: + created = datetime.fromtimestamp(created).strftime( + "%Y-%m-%d %H:%M:%S" + ) + else: + created = "n/a" + self.stores.oidmap.set_pending(now) + self.dispatchers.oidmap.dispatch(props.build_timeout, now) + self.log.info( + "submitted job to rebuild oidmap created=%s %s=%s", + created, + Constants.oidmap_build_timeout_id, + props.build_timeout, + ) + + +class _MetricCollector(PeriodicTask): + def __init__(self, reporter): + super(_MetricCollector, self).__init__(interval=60) + self._reporter = reporter + self._metrics = _Metrics(reporter) + self._store = None + + def task(self): + if self._store is None: + client = getRedisClient(url=getRedisUrl()) + self._store = createObject("deviceconfigcache-store", client) + try: + self._collect() + self._reporter.save() + except Exception: + logging.getLogger("zen.configcache.manager.metrics").exception( + "failed to collect/record metrics" + ) + + def _collect(self): + counts = Counter() + ages = defaultdict(list) + now = time() + for status in self._store.query_statuses(): + key, ts = attr.astuple(status) + ages[type(status)].append(int(now - ts)) + counts.update([type(status)]) + + self._metrics.count.current.mark(counts.get(ConfigStatus.Current, 0)) + self._metrics.count.retired.mark(counts.get(ConfigStatus.Retired, 0)) + self._metrics.count.expired.mark(counts.get(ConfigStatus.Expired, 0)) + 
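A small worked example of the age check in _get_device_configs_to_rebuild above (the timestamps and TTL values are invented): get_older only returns configs older than the smallest configured TTL, and each candidate is then re-checked against its own effective limit.

now = 1000000.0
ttl = 12 * 60 * 60         # this device's time-to-live
min_ttl = 4 * 60 * 60      # this device's minimum time-to-live
limit = max(min_ttl, ttl)  # 43200 seconds; the larger value wins
threshold = now - limit

updated = now - 13 * 60 * 60  # config last built 13 hours ago
updated <= threshold          # True  -> appended to ready_to_rebuild

updated = now - 2 * 60 * 60   # config last built 2 hours ago
updated <= threshold          # False -> left alone this cycle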
self._metrics.count.pending.mark(counts.get(ConfigStatus.Pending, 0)) + self._metrics.count.building.mark(counts.get(ConfigStatus.Building, 0)) + + for age in ages.get(ConfigStatus.Current, []): + self._metrics.age.current.update(age) + for age in ages.get(ConfigStatus.Expired, []): + self._metrics.age.retired.update(age) + for age in ages.get(ConfigStatus.Retired, []): + self._metrics.age.expired.update(age) + for age in ages.get(ConfigStatus.Pending, []): + self._metrics.age.pending.update(age) + for age in ages.get(ConfigStatus.Building, []): + self._metrics.age.building.update(age) + + +class StatusCountGauge(Gauge): + def __init__(self): + self._value = 0 + + @property + def value(self): + return self._value + + def mark(self, value): + self._value = value + + +class _Metrics(object): + def __init__(self, reporter): + self.count = type( + "Count", + (object,), + { + "current": StatusCountGauge(), + "retired": StatusCountGauge(), + "expired": StatusCountGauge(), + "pending": StatusCountGauge(), + "building": StatusCountGauge(), + }, + )() + reporter.register("count.current", self.count.current) + reporter.register("count.retired", self.count.retired) + reporter.register("count.expired", self.count.expired) + reporter.register("count.pending", self.count.pending) + reporter.register("count.building", self.count.building) + self.age = type( + "Age", + (object,), + { + "current": HistogramExponentiallyDecaying(), + "retired": HistogramExponentiallyDecaying(), + "expired": HistogramExponentiallyDecaying(), + "pending": HistogramExponentiallyDecaying(), + "building": HistogramExponentiallyDecaying(), + }, + )() + reporter.register("age.current", self.age.current) + reporter.register("age.retired", self.age.retired) + reporter.register("age.expired", self.age.expired) + reporter.register("age.pending", self.age.pending) + reporter.register("age.building", self.age.building) diff --git a/Products/ZenCollector/configcache/meta.zcml b/Products/ZenCollector/configcache/meta.zcml new file mode 100644 index 0000000000..681ee9d0b5 --- /dev/null +++ b/Products/ZenCollector/configcache/meta.zcml @@ -0,0 +1,14 @@ + + + + + diff --git a/Products/ZenCollector/configcache/modelchange.zcml b/Products/ZenCollector/configcache/modelchange.zcml new file mode 100644 index 0000000000..d943a5e87f --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange.zcml @@ -0,0 +1,121 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Products/ZenCollector/configcache/modelchange/__init__.py b/Products/ZenCollector/configcache/modelchange/__init__.py new file mode 100644 index 0000000000..023c178973 --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/__init__.py @@ -0,0 +1,16 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +from .invalidation import Invalidation, InvalidationCause +from .processor import InvalidationProcessor + + +__all__ = ("Invalidation", "InvalidationCause", "InvalidationProcessor") diff --git a/Products/ZenCollector/configcache/modelchange/filters.py b/Products/ZenCollector/configcache/modelchange/filters.py new file mode 100644 index 0000000000..3682f6b26d --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/filters.py @@ -0,0 +1,263 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2011, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import logging +import re + +from cStringIO import StringIO +from hashlib import sha256 + +from Acquisition import aq_base +from zope.interface import implementer + +from Products.ZenHub.interfaces import ( + FILTER_CONTINUE, + FILTER_EXCLUDE, + FILTER_INCLUDE, + IInvalidationFilter, +) +from Products.ZenModel.DeviceClass import DeviceClass +from Products.ZenModel.GraphDefinition import GraphDefinition +from Products.ZenModel.GraphPoint import GraphPoint +from Products.ZenModel.IpAddress import IpAddress +from Products.ZenModel.IpNetwork import IpNetwork +from Products.ZenModel.Monitor import Monitor +from Products.ZenModel.MibModule import MibModule +from Products.ZenModel.MibNode import MibNode +from Products.ZenModel.MibNotification import MibNotification +from Products.ZenModel.MibOrganizer import MibOrganizer +from Products.ZenModel.OSProcessClass import OSProcessClass +from Products.ZenModel.OSProcessOrganizer import OSProcessOrganizer +from Products.ZenModel.ProductClass import ProductClass +from Products.ZenModel.ServiceClass import ServiceClass +from Products.ZenModel.Software import Software +from Products.ZenWidgets.Portlet import Portlet +from Products.Zuul.catalog.interfaces import IModelCatalogTool + +from ..constants import Constants + +log = logging.getLogger("zen.{}".format(__name__.split(".")[-1].lower())) + + +@implementer(IInvalidationFilter) +class IgnorableClassesFilter(object): + """Ignore invalidations on certain classes.""" + + CLASSES_TO_IGNORE = ( + IpAddress, + IpNetwork, + GraphDefinition, + GraphPoint, + Monitor, + Portlet, + ProductClass, + ServiceClass, + Software, + ) + + def initialize(self, context): + pass + + def include(self, obj): + if isinstance(obj, self.CLASSES_TO_IGNORE): + log.debug("IgnorableClassesFilter is ignoring %s ", obj) + return FILTER_EXCLUDE + return FILTER_CONTINUE + + +_iszorcustprop = re.compile("[zc][A-Z]").match + +_excluded_properties = ( + Constants.device_build_timeout_id, + Constants.device_pending_timeout_id, + Constants.device_time_to_live_id, + Constants.device_minimum_time_to_live_id, +) + + +def _include_property(propId): + if propId in _excluded_properties: + return None + return _iszorcustprop(propId) + + +def _getZorCProperties(organizer): + for zId in sorted(organizer.zenPropertyIds(pfilt=_include_property)): + try: + if organizer.zenPropIsPassword(zId): + propertyString = organizer.getProperty(zId, "") + else: + propertyString = organizer.zenPropertyString(zId) + yield zId, propertyString + except AttributeError: + # ZEN-3666: If an attribute error is raised on a zProperty + # 
assume it was produced by a zenpack + # install whose daemons haven't been restarted and continue + # excluding the offending property. + log.debug("Excluding '%s' property", zId) + + +@implementer(IInvalidationFilter) +class BaseOrganizerFilter(object): + """ + Base invalidation filter for organizers. + + The default implementation will reject organizers that do not have + updated calculated checksum values. The checksum is calculated using + accumulation of each 'z' and 'c' property associated with organizer. + """ + + weight = 10 + + def __init__(self, types): + self._types = types + + def getRoot(self, context): + return context.dmd.primaryAq() + + def initialize(self, context): + root = self.getRoot(context) + brains = IModelCatalogTool(root).search(self._types) + results = {} + for brain in brains: + try: + obj = brain.getObject() + results[brain.getPath()] = self.organizerChecksum(obj) + except KeyError: + log.warn("Unable to retrieve object: %s", brain.getPath()) + self.checksum_map = results + + def organizerChecksum(self, organizer): + m = sha256() + self.generateChecksum(organizer, m) + return m.hexdigest() + + def generateChecksum(self, organizer, hash_checksum): + # Checksum all zProperties and custom properties + for zId, propertyString in _getZorCProperties(organizer): + hash_checksum.update("%s|%s" % (zId, propertyString)) + + def include(self, obj): + # Move on if it's not one of our types + if not isinstance(obj, self._types): + return FILTER_CONTINUE + + # Checksum the device class + current_checksum = self.organizerChecksum(obj) + organizer_path = "/".join(obj.getPrimaryPath()) + + # Get what we have right now and compare + existing_checksum = self.checksum_map.get(organizer_path) + if current_checksum != existing_checksum: + log.debug("%r has a new checksum! Including.", obj) + self.checksum_map[organizer_path] = current_checksum + return FILTER_CONTINUE + log.debug("%r checksum unchanged. Skipping.", obj) + return FILTER_EXCLUDE + + +class DeviceClassInvalidationFilter(BaseOrganizerFilter): + """ + Invalidation filter for DeviceClass organizers. + + Uses both 'z' and 'c' properties as well as locally bound RRD templates + to create the checksum. + """ + + def __init__(self): + super(DeviceClassInvalidationFilter, self).__init__((DeviceClass,)) + + def getRoot(self, context): + return context.dmd.Devices.primaryAq() + + def generateChecksum(self, organizer, hash_checksum): + """ + Generate a checksum representing the state of the device class as it + pertains to configuration. This takes into account templates and + zProperties, nothing more. + """ + s = StringIO() + # Checksum includes all bound templates + for tpl in organizer.rrdTemplates(): + s.seek(0) + s.truncate() + try: + tpl.exportXml(s) + except Exception: + log.exception( + "unable to export XML of template template=%r", tpl + ) + else: + hash_checksum.update(s.getvalue()) + # Include z/c properties from base class + super(DeviceClassInvalidationFilter, self).generateChecksum( + organizer, hash_checksum + ) + + +class OSProcessOrganizerFilter(BaseOrganizerFilter): + """Invalidation filter for OSProcessOrganizer objects.""" + + def __init__(self): + super(OSProcessOrganizerFilter, self).__init__((OSProcessOrganizer,)) + + def getRoot(self, context): + return context.dmd.Processes.primaryAq() + + +class OSProcessClassFilter(BaseOrganizerFilter): + """ + Invalidation filter for OSProcessClass objects. 
+ + This filter uses 'z' and 'c' properties as well as local _properties + defined on the organizer to create a checksum. + """ + + def __init__(self): + super(OSProcessClassFilter, self).__init__((OSProcessClass,)) + + def getRoot(self, context): + return context.dmd.Processes.primaryAq() + + def generateChecksum(self, organizer, hash_checksum): + # Include properties of OSProcessClass + for prop in organizer._properties: + prop_id = prop["id"] + hash_checksum.update( + "%s|%s" % (prop_id, getattr(organizer, prop_id, "")) + ) + # Include z/c properties from base class + super(OSProcessClassFilter, self).generateChecksum( + organizer, hash_checksum + ) + + +@implementer(IInvalidationFilter) +class MibFilter(object): + """ + Invalidation filter for MibModule objects. + + This filter uses 'z' and 'c' properties as well as local _properties + defined on the organizer to create a checksum. + """ + + def initialize(self, context): + pass + + def include(self, obj): + if not isinstance( + obj, (MibOrganizer, MibModule, MibNode, MibNotification) + ): + return FILTER_CONTINUE + # log.info( + # "Detected a MIB invalidation type=%s id=%s", + # type(aq_base(obj)).__name__, + # obj.id, + # ) + return FILTER_INCLUDE diff --git a/Products/ZenCollector/configcache/modelchange/invalidation.py b/Products/ZenCollector/configcache/modelchange/invalidation.py new file mode 100644 index 0000000000..b45ac9ee0c --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/invalidation.py @@ -0,0 +1,39 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +import logging + +import attr + +from attr.validators import instance_of +from enum import IntEnum + +from Products.ZenRelations.PrimaryPathObjectManager import ( + PrimaryPathObjectManager, +) + +log = logging.getLogger("zen.{}".format(__name__.split(".")[-1].lower())) + + +class InvalidationCause(IntEnum): + """An enumeration of reasons for the invalidation.""" + + Removed = 1 + Updated = 2 + + +@attr.s(frozen=True, slots=True) +class Invalidation(object): + """Contains the OID and the entity referenced by the OID.""" + + oid = attr.ib(validator=instance_of(str)) + entity = attr.ib(validator=instance_of(PrimaryPathObjectManager)) + reason = attr.ib(validator=instance_of(InvalidationCause)) diff --git a/Products/ZenCollector/configcache/modelchange/oids.py b/Products/ZenCollector/configcache/modelchange/oids.py new file mode 100644 index 0000000000..d366709632 --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/oids.py @@ -0,0 +1,262 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +import logging + +from zenoss.modelindex.model_index import CursorSearchParams +from zExceptions import NotFound +from zope.interface import implementer + +from Products.ZenHub.interfaces import IInvalidationOid +from Products.ZenModel.Device import Device +from Products.ZenRelations.RelationshipBase import IRelationship +from Products.Zuul.catalog.interfaces import IModelCatalogTool + +log = logging.getLogger("zen.configcache.modelchange") + + +class BaseTransform(object): + def __init__(self, entity): + self._entity = entity + + +@implementer(IInvalidationOid) +class IdentityOidTransform(BaseTransform): + """Identity transformation returns the OID that was given.""" + + def transformOid(self, oid): + log.debug( + "[IdentityOidTransform] entity=%s oid=%r", self._entity, oid + ) + return oid + + +@implementer(IInvalidationOid) +class RootMibOrganizer(BaseTransform): + """ + Transform into the root MibOrganizer. + """ + + def transformOid(self, oid): + return self._entity.getDmdRoot("Mibs")._p_oid + + +@implementer(IInvalidationOid) +class ComponentOidTransform(BaseTransform): + """ + If the object has a relationship with a device, return the device's OID. + """ + + def transformOid(self, oid): + funcs = ( + lambda: self._getdevice(self._entity), + self._from_os, + self._from_hw, + ) + for fn in funcs: + device = fn() + if device: + log.debug( + "[ComponentOidTransform] transformed oid to device " + "entity=%s oid=%r device=%s", + self._entity, + oid, + device, + ) + return device._p_oid + log.debug( + "[ComponentOidTransform] oid not transformed entity=%s oid=%r", + self._entity, + oid, + ) + return oid + + def _from_os(self): + return self._getdevice(getattr(self._entity, "os", None)) + + def _from_hw(self): + return self._getdevice(getattr(self._entity, "hw", None)) + + def _getdevice(self, entity): + if entity is None: + return + if isinstance(entity, IRelationship): + entity = entity() + return getattr(entity, "device", lambda: None)() + + +@implementer(IInvalidationOid) +class DataPointToDevice(BaseTransform): + """Return the device OIDs associated with an RRDDataPoint.""" + + def transformOid(self, oid): + ds = _getDataSource(self._entity) + if not ds: + return () + template = _getTemplate(ds.primaryAq()) + if not template: + return () + dc = _getDeviceClass(template) + if dc: + log.debug( + "[DataPointToDevice] return OIDs of devices associated " + "with DataPoint entity=%s", + self._entity, + ) + return _getDevicesFromDeviceClass(dc) + + log.debug( + "[DataPointToDevice] return OID of device associated " + "with DataPoint of local RRDTemplate entity=%s", + self._entity, + ) + return _getDeviceFromLocalTemplate(template) + + +@implementer(IInvalidationOid) +class DataSourceToDevice(BaseTransform): + """Return the device OIDs associated with an RRDDataSource.""" + + def transformOid(self, oid): + template = _getTemplate(self._entity) + if not template: + return () + dc = _getDeviceClass(template) + if dc: + log.debug( + "[DataSourceToDevice] return OIDs of devices associated " + "with DataSource entity=%s", + self._entity, + ) + return _getDevicesFromDeviceClass(dc) + + log.debug( + "[DataSourceToDevice] return OID of device associated " + "with DataSource of local RRDTemplate entity=%s", + self._entity, + ) + return _getDeviceFromLocalTemplate(template) + + +@implementer(IInvalidationOid) +class TemplateToDevice(BaseTransform): + """Return the device OIDs 
associated with an RRDTemplate.""" + + def transformOid(self, oid): + dc = _getDeviceClass(self._entity) + if dc: + log.debug( + "[TemplateToDevice] return OIDs of devices associated " + "with RRDTemplate entity=%s", + self._entity, + ) + return _getDevicesFromDeviceClass(dc) + + log.debug( + "[TemplateToDevice] return OID of device associated " + "with local RRDTemplate entity=%s", + self._entity, + ) + return _getDeviceFromLocalTemplate(self._entity) + + +@implementer(IInvalidationOid) +class DeviceClassToDevice(BaseTransform): + """Return the device OIDs in the DeviceClass hierarchy.""" + + def transformOid(self, oid): + log.debug( + "[DeviceClassToDevice] return OIDs of devices associated " + "with DeviceClass entity=%s", + self._entity, + ) + return _getDevicesFromDeviceClass(self._entity) + + +@implementer(IInvalidationOid) +class ThresholdToDevice(BaseTransform): + """Return the device OIDs in the DeviceClass hierarchy.""" + + def transformOid(self, oid): + template = _getTemplate(self._entity) + if not template: + return () + dc = _getDeviceClass(template) + if dc: + log.debug( + "[ThresholdToDevice] return OIDs of devices associated " + "with threshold entity=%s", + self._entity, + ) + return _getDevicesFromDeviceClass(dc) + + log.debug( + "[ThresholdToDevice] return OID of device associated " + "with Threshold of local RRDTemplate entity=%s", + self._entity, + ) + return _getDeviceFromLocalTemplate(template) + + +def _getDataSource(dp): + ds = dp.datasource() + if ds is None: + if log.isEnabledFor(logging.DEBUG): + log.warn("no datasource relationship datapoint=%s", dp) + return None + return ds.primaryAq() + + +def _getTemplate(ds): + template = ds.rrdTemplate() + if template is None: + if log.isEnabledFor(logging.DEBUG): + log.warn("no template relationship datasource=%s", ds) + return None + return template.primaryAq() + + +def _getDeviceClass(template): + dc = template.deviceClass() + if dc is None: + if log.isEnabledFor(logging.DEBUG): + log.warn("no device class relationship template=%s", template) + return None + return dc.primaryAq() + + +def _getDeviceFromLocalTemplate(template): + obj = template + while not isinstance(obj, Device): + try: + obj = obj.getParentNode() + except Exception: + if log.isEnabledFor(logging.DEBUG): + log.warn("unable to find device template=%r", template) + return None + return obj._p_oid + + +def _getDevicesFromDeviceClass(dc): + tool = IModelCatalogTool(dc.dmd.Devices) + query, _ = tool._build_query( + types=("Products.ZenModel.Device.Device",), + paths=("{}*".format("/".join(dc.getPhysicalPath())),), + ) + params = CursorSearchParams(query) + result = tool.model_catalog_client.cursor_search(params, dc.dmd) + for brain in result.results: + try: + ob = brain.getObject() + if ob: + yield ob._p_oid + except (NotFound, KeyError, AttributeError): + pass diff --git a/Products/ZenCollector/configcache/modelchange/pipeline.py b/Products/ZenCollector/configcache/modelchange/pipeline.py new file mode 100644 index 0000000000..2b8e63ed1f --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/pipeline.py @@ -0,0 +1,126 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +import logging + +from .utils import coroutine, into_tuple + +log = logging.getLogger("zen.configcache.modelchange.pipeline") + + +class Pipe(object): + """ + Abstract base class for the pipes in a pipeline. + + A pipeline is a sequence of generators (coroutines) where each generator + returns data that is forward to the next generator. The `node` method + creates the generator. + + Pipelines are push-style, meaning that the generators do not run until + data is sent into the pipeline. Calling `send` on the generator returned + by `node` pushes data into the pipeline. The `send` function blocks until + the pipeline has finished. + + A Pipe may have one or more outputs or no outputs. Each output includes + an ID to identify which Pipe the output is forwarded to. If there's only + one output, no ID is required and the default ID is used. + + Use the `connect` method to connect one Pipe to another. + + Returning None from `run` (an Action object) stops the pipeline. When + stopped in this way, the pipeline is ready for the next input. + + :param targets: References to the next nodes in the pipeline. + :type targets: Dict[Any, GeneratorType] + :param run: References the callable that's invoked when data is applied. + :type run: Action + """ + + def __init__(self, action): + """Initialize a Pipe instance. + + :param action: Called to process the data passed to this node. + :type action: callable + """ + self.targets = {} + self.run = action + + @coroutine + def node(self): + """Returns the node that forms the pipeline.""" + while True: + args = yield + self.apply(args) + + def apply(self, args): + """Applies the arguments to the action.""" + args = into_tuple(args) + results = self.run(*args) + if results is None: + return + results = into_tuple(results) + if len(results) == 1: + tid, output = self.run.DEFAULT, results[0] + else: + tid, output = results[0], results[1] + if tid not in self.targets: + log.warn("no such target ID: %s", tid) + return + self.targets[tid].send(output) + + def connect(self, target, tid=None): + """ + Connects a Pipe to a specific output. + + If this node will have only one output, a default ID can be used. + + :param target: The pipeline node to receive the output. + :type target: Pipe + :param tid: The ID of the output. + :type tid: int + """ + tid = tid if tid is not None else self.run.DEFAULT + self.targets[tid] = target.node() + + +class IterablePipe(Pipe): + """ + A variation of the Pipe that iterates over the input passing + each item to `run` rather than passing all the data at once. + + If a `None` is returned by `run`, rather than stopping, an IterablePipe + continues on to the next item in the iterable. + """ + + def apply(self, args): + iterable = into_tuple(args) + for item in iterable: + super(IterablePipe, self).apply(item) + + +class Action(object): + """Base class for action objects passed to Pipes.""" + + DEFAULT = object() + """Default target ID.""" + + def __call__(self, *data): + """ + Processes the given data and returns data that is forwarded to the + next node in the pipeline. + + If the returned data is intended for a specific target, return a + two-element tuple where the target ID is the first element and the + output data is the second element. 
+ + :rtype: Any | (Any, Any) + """ + raise NotImplementedError("'__call__' method not implemented") diff --git a/Products/ZenCollector/configcache/modelchange/processor.py b/Products/ZenCollector/configcache/modelchange/processor.py new file mode 100644 index 0000000000..569ea57dc7 --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/processor.py @@ -0,0 +1,228 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +import logging + +from itertools import chain + +from ZODB.POSException import POSKeyError +from zope.component import subscribers + +from Products.ZenHub.interfaces import ( + FILTER_INCLUDE, + FILTER_EXCLUDE, + IInvalidationOid, +) +from Products.ZenModel.DeviceComponent import DeviceComponent +from Products.ZenRelations.PrimaryPathObjectManager import ( + PrimaryPathObjectManager, +) + +from .invalidation import Invalidation, InvalidationCause +from .pipeline import Pipe, IterablePipe, Action +from .utils import into_tuple + +log = logging.getLogger("zen.configcache.modelchange") + + +class InvalidationProcessor(object): + """Takes an invalidation and produces ZODB objects. + + An invalidation is represented as an ZODB object ID (oid). + An oid is accepted by the `get` method and a sequence of ZODB objects + are returned. Generally, only one object is in the sequence, but it + is possible for more than one object to be returned. + """ + + def __init__(self, app, filters): + """Initialize an InvalidationProcessor instance. + + :param app: The dmd object + :type app: + :param filters: A list of filters to apply to the invalidation + :type filters: + """ + oid2obj_1 = Pipe(OidToObject(app)) + oid2obj_2 = IterablePipe(OidToObject(app)) + apply_filter = Pipe(ApplyFilters(filters)) + apply_transforms = Pipe(ApplyTransforms()) + + self.__results = CollectInvalidations() + collect = Pipe(self.__results) + + oid2obj_1.connect(apply_filter) + oid2obj_1.connect(collect, tid=OidToObject.SINK) + apply_filter.connect(apply_transforms) + apply_transforms.connect(oid2obj_2) + apply_transforms.connect(collect, tid=ApplyTransforms.SINK) + oid2obj_2.connect(collect) + oid2obj_2.connect(collect, tid=OidToObject.SINK) + + self.__pipeline = oid2obj_1.node() + + def apply(self, oid): + """Send data into the pipeline.""" + self.__pipeline.send(oid) + return self.__results.pop() + + +class OidToObject(Action): + """Validates the OID to ensure it references a device.""" + + SINK = "sink1" + + def __init__(self, app): + """ + Initialize an OidToObject instance. + + :param app: ZODB application root object. + :type app: OFS.Application.Application + """ + super(OidToObject, self).__init__() + self._app = app + + def __call__(self, oid): + try: + # Retrieve the object using its OID. + obj = self._app._p_jar[oid] + except POSKeyError: + # Skip if this OID doesn't exist. + return + # Exclude the object if it doesn't have the right base class. + if not isinstance(obj, (PrimaryPathObjectManager, DeviceComponent)): + return + try: + # Wrap the bare object into an Acquisition wrapper. + obj = obj.__of__(self._app.zport.dmd).primaryAq() + except (AttributeError, KeyError): + # An exception at this implies a deleted device. 
+ return ( + self.SINK, + Invalidation(oid, obj, InvalidationCause.Removed), + ) + else: + return ( + self.DEFAULT, + Invalidation(oid, obj, InvalidationCause.Updated), + ) + + +class ApplyFilters(Action): + """ + Filter the invalidation against IInvalidationFilter objects. + + Invalidations explicitely excluded by a filter are dropped from the + pipeline. + """ + + def __init__(self, filters): + """ + Initialize a FilterObject instance. + + :param filters: The invalidation filters. + :type filters: Sequence[IInvalidationFilter] + """ + super(ApplyFilters, self).__init__() + self._filters = filters + + def __call__(self, invalidation): + for fltr in self._filters: + result = fltr.include(invalidation.entity) + if result in (FILTER_INCLUDE, FILTER_EXCLUDE): + if result is FILTER_EXCLUDE: + log.debug( + "invalidation excluded by filter filter=%r entity=%s", + fltr, + invalidation.entity, + ) + break + else: + result = FILTER_INCLUDE + if result is not FILTER_EXCLUDE: + return invalidation + + +class ApplyTransforms(Action): + """ + The invalidation pipeline concerns itself with certain types of + objects. The `ApplyTransforms` node determines whether the OID refers + to a nested object within a desired object type and if the OID is + a nested object, the OID of the parent object is returned to be used + in its place. + """ + + SINK = "sink2" + + def __call__(self, invalidation): + # First, get any subscription adapters registered as transforms + adapters = subscribers((invalidation.entity,), IInvalidationOid) + # Next check for an old-style (regular adapter) transform + try: + adapters = chain( + adapters, (IInvalidationOid(invalidation.entity),) + ) + except TypeError: + # No old-style adapter is registered + pass + transformed = set() + for adapter_ in adapters: + result = adapter_.transformOid(invalidation.oid) + if isinstance(result, str): + transformed.add(result) + elif hasattr(result, "__iter__"): + # If the transform didn't give back a string, it should have + # given back an iterable + transformed.update(result) + else: + log.warn( + "IInvalidationOid adaptor returned a bad result " + "adaptor=%r result=%r entity=%s oid=%s", + adapter_, + result, + invalidation.entity, + invalidation.oid, + ) + # Remove any Nones a transform may have returned. + transformed.discard(None) + # Remove the original OID from the set of transformed OIDs; + # we don't want the original OID if any OIDs were returned. 
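For readers new to the push-style pipeline defined in pipeline.py above, here is a minimal toy example; the Double and Collect actions are invented for illustration, and only the Pipe/Action API from this patch is used:

class Double(Action):
    def __call__(self, value):
        return value * 2         # single result -> routed to the DEFAULT target

class Collect(Action):
    def __init__(self):
        self.seen = []

    def __call__(self, value):
        self.seen.append(value)  # returns None, so the pipeline stops here

collector = Collect()
head = Pipe(Double())
head.connect(Pipe(collector))    # one output, so the default target ID is used
pipeline = head.node()           # @coroutine primes the generator
pipeline.send(21)                # push 21 through: Double -> Collect
assert collector.seen == [42]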
+ transformed.discard(invalidation.oid) + + if transformed: + return (self.DEFAULT, transformed) + return (self.SINK, (invalidation,)) + + +class CollectInvalidations(Action): + """Collects the results of the pipeline.""" + + def __init__(self): + self._output = set() + + def __call__(self, result): + results = into_tuple(result) + entities = [] + for result in results: + entities.append(result) + log.debug( + "collected an invalidation reason=%s entity=%s oid=%r", + result.reason, + result.entity, + result.oid, + ) + self._output.update(entities) + + def pop(self): + """Return the collected data, removing it from the set.""" + try: + return self._output.copy() + finally: + self._output.clear() diff --git a/Products/ZenCollector/configcache/modelchange/tests/__init__.py b/Products/ZenCollector/configcache/modelchange/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenCollector/configcache/modelchange/tests/mock_interface.py b/Products/ZenCollector/configcache/modelchange/tests/mock_interface.py new file mode 100644 index 0000000000..34a8ae4cd2 --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/tests/mock_interface.py @@ -0,0 +1,29 @@ +import types +from mock import Mock +from zope.interface import classImplements + + +def create_interface_mock(interface_class): + """given a Zope Interface class + return a Mock sub class + that implements the given Zope interface class. + + Mock objects created from this InterfaceMock will + have Attributes and Methods required in the Interface + will not have Attributes or Methods that are not specified + """ + + # the init method, automatically spec the interface methods + def init(self, *args, **kwargs): + Mock.__init__(self, *args, spec=interface_class.names(), **kwargs) + + # subclass named 'Mock' + name = interface_class.__name__ + "Mock" + + # create the class object and provide the init method + klass = types.TypeType(name, (Mock,), {"__init__": init}) + + # the new class should implement the interface + classImplements(klass, interface_class) + + return klass diff --git a/Products/ZenCollector/configcache/modelchange/tests/test_filters.py b/Products/ZenCollector/configcache/modelchange/tests/test_filters.py new file mode 100644 index 0000000000..3ae955dcc3 --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/tests/test_filters.py @@ -0,0 +1,295 @@ +from mock import Mock, patch, create_autospec +from Products.ZCatalog.interfaces import ICatalogBrain +from unittest import TestCase +from zope.interface.verify import verifyObject + +from ..filters import ( + _getZorCProperties, + _iszorcustprop, + BaseOrganizerFilter, + DeviceClass, + DeviceClassInvalidationFilter, + FILTER_CONTINUE, + FILTER_EXCLUDE, + IgnorableClassesFilter, + IInvalidationFilter, + sha256, + OSProcessClass, + OSProcessClassFilter, + OSProcessOrganizer, + OSProcessOrganizerFilter, +) +from .mock_interface import create_interface_mock + +PATH = {"path": "Products.ZenCollector.configcache.modelchange.filters"} + + +class IgnorableClassesFilterTest(TestCase): + def setUp(t): + t.icf = IgnorableClassesFilter() + + def test_init(t): + IInvalidationFilter.providedBy(t.icf) + # current version fails because weight attribute is not defined + # icf.weight = 1 + # verifyObject(IInvalidationFilter, icf) + t.assertTrue(hasattr(t.icf, "CLASSES_TO_IGNORE")) + + def test_initialize(t): + context = Mock(name="context") + t.icf.initialize(context) + # No return or side-effects + + def test_include(t): + obj = Mock(name="object") + 
out = t.icf.include(obj) + t.assertEqual(out, FILTER_CONTINUE) + + def test_include_excludes_classes_to_ignore(t): + t.icf.CLASSES_TO_IGNORE = str + out = t.icf.include("ignore me!") + t.assertEqual(out, FILTER_EXCLUDE) + + +class BaseOrganizerFilterTest(TestCase): + def setUp(t): + t.types = Mock(name="types") + t.bof = BaseOrganizerFilter(t.types) + + # @patch with autospec fails (https://bugs.python.org/issue23078) + # manually spec ZenPropertyManager + t.organizer = Mock( + name="Products.ZenRelations.ZenPropertyManager", + spec_set=[ + "zenPropertyIds", + "getProperty", + "zenPropIsPassword", + "zenPropertyString", + ], + ) + + def test_init(t): + IInvalidationFilter.providedBy(t.bof) + verifyObject(IInvalidationFilter, t.bof) + t.assertEqual(t.bof.weight, 10) + t.assertEqual(t.bof._types, t.types) + + def test_iszorcustprop(t): + result = _iszorcustprop("no match") + t.assertEqual(result, None) + result = _iszorcustprop("cProperty") + t.assertTrue(result) + result = _iszorcustprop("zProperty") + t.assertTrue(result) + + def test_getRoot(t): + context = Mock(name="context") + root = t.bof.getRoot(context) + t.assertEqual(root, context.dmd.primaryAq()) + + @patch( + "{path}.IModelCatalogTool".format(**PATH), autospec=True, spec_set=True + ) + def test_initialize(t, IModelCatalogTool): + # Create a Mock object that provides the ICatalogBrain interface + ICatalogBrainMock = create_interface_mock(ICatalogBrain) + brain = ICatalogBrainMock() + + IModelCatalogTool.return_value.search.return_value = [brain] + checksum = create_autospec(t.bof.organizerChecksum) + t.bof.organizerChecksum = checksum + context = Mock(name="context") + + t.bof.initialize(context) + + t.assertEqual( + t.bof.checksum_map, + {brain.getPath.return_value: checksum.return_value}, + ) + + def test_getZorCProperties(t): + zprop = Mock(name="zenPropertyId", spec_set=[]) + t.organizer.zenPropertyIds.return_value = [zprop, zprop] + + # getZorCProperties returns a generator + results = _getZorCProperties(t.organizer) + + t.organizer.zenPropIsPassword.return_value = False + zId, propertyString = next(results) + t.assertEqual(zId, zprop) + t.assertEqual( + propertyString, t.organizer.zenPropertyString.return_value + ) + t.organizer.zenPropertyString.assert_called_with(zprop) + + t.organizer.zenPropIsPassword.return_value = True + zId, propertyString = next(results) + t.assertEqual(zId, zprop) + t.assertEqual(propertyString, t.organizer.getProperty.return_value) + t.organizer.getProperty.assert_called_with(zprop, "") + + with t.assertRaises(StopIteration): + next(results) + + @patch( + "{path}._getZorCProperties".format(**PATH), + autospec=True, + spec_set=True, + ) + def test_generateChecksum(t, _getZorCProps): + zprop = Mock(name="zenPropertyId", spec_set=[]) + data = (zprop, "property_string") + _getZorCProps.return_value = [data] + actual = sha256() + + expect = sha256() + expect.update("%s|%s" % data) + + t.bof.generateChecksum(t.organizer, actual) + + _getZorCProps.assert_called_with(t.organizer) + t.assertEqual(actual.hexdigest(), expect.hexdigest()) + + @patch( + "{path}._getZorCProperties".format(**PATH), + autospec=True, + spec_set=True, + ) + def test_organizerChecksum(t, _getZorCProps): + zprop = Mock(name="zenPropertyId", spec_set=[]) + data = (zprop, "property_string") + _getZorCProps.return_value = [data] + + out = t.bof.organizerChecksum(t.organizer) + + expect = sha256() + expect.update("%s|%s" % data) + t.assertEqual(out, expect.hexdigest()) + + def test_include_ignores_non_matching_types(t): + t.bof._types = 
(str,) + ret = t.bof.include(False) + t.assertEqual(ret, FILTER_CONTINUE) + + def test_include_if_checksum_changed(t): + organizerChecksum = create_autospec(t.bof.organizerChecksum) + t.bof.organizerChecksum = organizerChecksum + t.bof._types = (Mock,) + obj = Mock(name="object", spec_set=["getPrimaryPath"]) + obj.getPrimaryPath.return_value = ["dmd", "brain"] + organizer_path = "/".join(obj.getPrimaryPath()) + t.bof.checksum_map = {organizer_path: "existing_checksum"} + organizerChecksum.return_value = "current_checksum" + + ret = t.bof.include(obj) + + t.assertEqual(ret, FILTER_CONTINUE) + + def test_include_if_checksum_unchanged(t): + organizerChecksum = create_autospec(t.bof.organizerChecksum) + t.bof.organizerChecksum = organizerChecksum + existing_checksum = "checksum" + current_checksum = "checksum" + organizerChecksum.return_value = current_checksum + t.bof._types = (Mock,) + obj = Mock(name="object", spec_set=["getPrimaryPath"]) + obj.getPrimaryPath.return_value = ["dmd", "brain"] + organizer_path = "/".join(obj.getPrimaryPath()) + t.bof.checksum_map = {organizer_path: existing_checksum} + + ret = t.bof.include(obj) + + t.assertEqual(ret, FILTER_EXCLUDE) + + +class DeviceClassInvalidationFilterTest(TestCase): + def setUp(t): + t.dcif = DeviceClassInvalidationFilter() + + def test_init(t): + IInvalidationFilter.providedBy(t.dcif) + verifyObject(IInvalidationFilter, t.dcif) + t.assertEqual(t.dcif._types, (DeviceClass,)) + + def test_getRoot(t): + context = Mock(name="context") + root = t.dcif.getRoot(context) + t.assertEqual(root, context.dmd.Devices.primaryAq()) + + @patch( + "{path}.BaseOrganizerFilter.generateChecksum".format(**PATH), + autospec=True, + spec_set=True, + ) + def test_generateChecksum(t, super_generateChecksum): + hash_checksum = sha256() + organizer = Mock( + name="Products.ZenRelations.ZenPropertyManager", + spec_set=["rrdTemplates"], + ) + rrdTemplate = Mock(name="rrdTemplate") + rrdTemplate.exportXml.return_value = "some exemel" + organizer.rrdTemplates.return_value = [rrdTemplate] + + t.dcif.generateChecksum(organizer, hash_checksum) + + # We cannot validate the output of the current version, refactor needed + rrdTemplate.exportXml.was_called_once() + super_generateChecksum.assert_called_with( + t.dcif, organizer, hash_checksum + ) + + +class OSProcessOrganizerFilterTest(TestCase): + def test_init(t): + ospof = OSProcessOrganizerFilter() + + IInvalidationFilter.providedBy(ospof) + verifyObject(IInvalidationFilter, ospof) + t.assertEqual(ospof._types, (OSProcessOrganizer,)) + + def test_getRoot(t): + ospof = OSProcessOrganizerFilter() + context = Mock(name="context") + root = ospof.getRoot(context) + t.assertEqual(root, context.dmd.Processes.primaryAq()) + + +class OSProcessClassFilterTest(TestCase): + def setUp(t): + t.ospcf = OSProcessClassFilter() + + def test_init(t): + IInvalidationFilter.providedBy(t.ospcf) + verifyObject(IInvalidationFilter, t.ospcf) + + t.assertEqual(t.ospcf._types, (OSProcessClass,)) + + def test_getRoot(t): + context = Mock(name="context") + root = t.ospcf.getRoot(context) + t.assertEqual(root, context.dmd.Processes.primaryAq()) + + @patch( + "{path}.BaseOrganizerFilter.generateChecksum".format(**PATH), + autospec=True, + spec_set=True, + ) + def test_generateChecksum(t, super_generateChecksum): + organizer = Mock( + name="Products.ZenRelations.ZenPropertyManager", + spec_set=["property_id", "_properties"], + ) + prop = {"id": "property_id"} + organizer._properties = [prop] + organizer.property_id = "value" + hash_checksum = sha256() 
+ + t.ospcf.generateChecksum(organizer, hash_checksum) + + expect = sha256() + expect.update("%s|%s" % (prop["id"], getattr(organizer, prop["id"]))) + t.assertEqual(hash_checksum.hexdigest(), expect.hexdigest()) + super_generateChecksum.assert_called_with( + t.ospcf, organizer, hash_checksum + ) diff --git a/Products/ZenCollector/configcache/modelchange/tests/test_oids.py b/Products/ZenCollector/configcache/modelchange/tests/test_oids.py new file mode 100644 index 0000000000..f04641eb4f --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/tests/test_oids.py @@ -0,0 +1,64 @@ +from unittest import TestCase +from mock import Mock + +from Products.ZenRelations.PrimaryPathObjectManager import ( + PrimaryPathObjectManager, +) +from ..oids import ( + IdentityOidTransform, + ComponentOidTransform, + IInvalidationOid, +) + +from zope.interface.verify import verifyObject + + +class IdentityOidTransformTest(TestCase): + def setUp(self): + self.obj = Mock(spec_set=PrimaryPathObjectManager) + self.default_oid_transform = IdentityOidTransform(self.obj) + + def test_implements_IInvalidationOid(self): + # Provides the interface + IInvalidationOid.providedBy(self.default_oid_transform) + # Implements the interface it according to spec + verifyObject(IInvalidationOid, self.default_oid_transform) + + def test_init(self): + self.assertEqual(self.default_oid_transform._entity, self.obj) + + def test_transformOid(self): + ret = self.default_oid_transform.transformOid("unmodified oid") + self.assertEqual(ret, "unmodified oid") + + +class ComponentOidTransformTest(TestCase): + def setUp(self): + self.obj = Mock(spec_set=PrimaryPathObjectManager) + self.device_oid_transform = ComponentOidTransform(self.obj) + + def test_implements_IInvalidationOid(self): + # Provides the interface + IInvalidationOid.providedBy(self.device_oid_transform) + # Implements the interface it according to spec + verifyObject(IInvalidationOid, self.device_oid_transform) + + def test_init(self): + self.assertEqual(self.device_oid_transform._entity, self.obj) + + def test_transformOid(self): + """returns unmodified oid, if _entity has no device attribute""" + self.assertFalse(hasattr(self.obj, "device")) + ret = self.device_oid_transform.transformOid("unmodified oid") + self.assertEqual(ret, "unmodified oid") + + def test_transformOid_returns_device_oid(self): + """returns obj.device()._p_oid if obj.device exists""" + obj = Mock(name="PrimaryPathObjectManager", spec_set=["device"]) + device = Mock(name="device", spec_set=["_p_oid"]) + obj.device.return_value = device + + device_oid_transform = ComponentOidTransform(obj) + ret = device_oid_transform.transformOid("ignored oid") + + self.assertEqual(ret, obj.device.return_value._p_oid) diff --git a/Products/ZenCollector/configcache/modelchange/utils.py b/Products/ZenCollector/configcache/modelchange/utils.py new file mode 100644 index 0000000000..ff881d8339 --- /dev/null +++ b/Products/ZenCollector/configcache/modelchange/utils.py @@ -0,0 +1,34 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +from functools import wraps as _wraps + +import six as _six + + +def coroutine(func): + """Decorator for initializing a generator as a coroutine.""" + + @_wraps(func) + def start(*args, **kw): + coro = func(*args, **kw) + coro.next() + return coro + + return start + + +def into_tuple(args): + if isinstance(args, _six.string_types): + return (args,) + elif not hasattr(args, "__iter__"): + return (args,) + return args diff --git a/Products/ZenCollector/configcache/propertymap.py b/Products/ZenCollector/configcache/propertymap.py new file mode 100644 index 0000000000..824f55cc3d --- /dev/null +++ b/Products/ZenCollector/configcache/propertymap.py @@ -0,0 +1,144 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import logging + +from .constants import Constants + +log = logging.getLogger("zen.configcache.propertymap") + + +class DevicePropertyMap(object): + """ + This class accepts a mapping of ZODB paths to a value. + + Users can interrogate the class instance by providing a path and + expecting the value from the mapping that best matches the given path. + + A 'best match' means the path with the longest match starting from the + left end of the path. + """ + + @classmethod + def make_ttl_map(cls, obj): + return cls( + getPropertyValues( + obj, + Constants.device_time_to_live_id, + Constants.device_time_to_live_value, + _getZProperty, + ), + Constants.device_time_to_live_value, + ) + + @classmethod + def make_minimum_ttl_map(cls, obj): + return cls( + getPropertyValues( + obj, + Constants.device_minimum_time_to_live_id, + Constants.device_minimum_time_to_live_value, + _getZProperty, + ), + Constants.device_minimum_time_to_live_value, + ) + + @classmethod + def make_pending_timeout_map(cls, obj): + return cls( + getPropertyValues( + obj, + Constants.device_pending_timeout_id, + Constants.device_pending_timeout_value, + _getZProperty, + ), + Constants.device_pending_timeout_value, + ) + + @classmethod + def make_build_timeout_map(cls, obj): + return cls( + getPropertyValues( + obj, + Constants.device_build_timeout_id, + Constants.device_build_timeout_value, + _getZProperty, + ), + Constants.device_build_timeout_value, + ) + + def __init__(self, values, default): + self.__values = tuple( + (p.split("/")[1:], v) for p, v in values.items() if v is not None + ) + self.__default = default + + def smallest_value(self): + try: + return min(self.__values, key=lambda item: item[1])[1] + except ValueError as ex: + # Check whether the ValueError is about an empty sequence. + # If it's not, re-raise the exception. + if "arg is an empty sequence" not in str(ex): + raise + return self.__default + + def get(self, request_uid): + # Be graceful on accepted input values. None is equivalent + # to no match so return the default value. 
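+        # Worked example (hypothetical values): given the mapping
+        #   {"/zport/dmd/Devices": 60, "/zport/dmd/Devices/Server": 30},
+        # a lookup for "/zport/dmd/Devices/Server/Linux/devices/dev1"
+        # returns 30 because "/zport/dmd/Devices/Server" is the longest
+        # path prefix matching the request.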
+ if request_uid is None: + return self.__default + # Split the request into its parts + req_parts = request_uid.split("/")[1:] + # Find all the path parts that match the request + matches = ( + (len(parts), value) + for parts, value in self.__values + if req_parts[0 : len(parts)] == parts + ) + try: + # Return the value associated with the path parts having + # the longest match with the request. + return max(matches, key=lambda item: item[0])[1] + except ValueError as ex: + # Check whether the ValueError is about an empty sequence. + # If it's not, re-raise the exception. + if "arg is an empty sequence" not in str(ex): + raise + # No path parts matched the request. + return self.__default + + +def getPropertyValues(obj, propname, default, getter, relName="devices"): + """ + Returns a mapping of UID -> property-value for the given z-property. + """ + values = {obj.getPrimaryId(): getter(obj, propname, default)} + values.update( + (inst.getPrimaryId(), getter(inst, propname, default)) + for inst in obj.getSubInstances(relName) + if inst.isLocal(propname) + ) + values.update( + (inst.getPrimaryId(), getter(inst, propname, default)) + for inst in obj.getOverriddenObjects(propname) + ) + if not values or any(v is None for v in values.values()): + raise RuntimeError( + "one or more values are None or z-property is missing " + "z-property=%s" % (propname,) + ) + return values + + +def _getZProperty(obj, propname, default): + value = obj.getZ(propname) + if value is None: + return default + return value diff --git a/Products/ZenCollector/configcache/store.zcml b/Products/ZenCollector/configcache/store.zcml new file mode 100644 index 0000000000..c355c7983d --- /dev/null +++ b/Products/ZenCollector/configcache/store.zcml @@ -0,0 +1,20 @@ + + + + + + + + + diff --git a/Products/ZenCollector/configcache/tasks/__init__.py b/Products/ZenCollector/configcache/tasks/__init__.py new file mode 100644 index 0000000000..62547f33ce --- /dev/null +++ b/Products/ZenCollector/configcache/tasks/__init__.py @@ -0,0 +1,13 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from .deviceconfig import build_device_config +from .oidmap import build_oidmap + +__all__ = ("build_device_config", "build_oidmap") diff --git a/Products/ZenCollector/configcache/tasks/deviceconfig.py b/Products/ZenCollector/configcache/tasks/deviceconfig.py new file mode 100644 index 0000000000..508774710d --- /dev/null +++ b/Products/ZenCollector/configcache/tasks/deviceconfig.py @@ -0,0 +1,299 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +from datetime import datetime +from time import time + +from zope.component import createObject +from zope.dottedname.resolve import resolve + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from Products.Jobber.task import requires, DMD +from Products.Jobber.zenjobs import app + +from ..cache import DeviceKey, DeviceRecord, ConfigStatus +from ..constants import Constants +from ..utils import DeviceProperties + + +@app.task( + bind=True, + base=requires(DMD), + name="configcache.build_device_config", + summary="Create Device Configuration Task", + description_template="Create the configuration for device {2}.", + ignore_result=True, + dmd_read_only=True, +) +def build_device_config( + self, monitorname, deviceid, configclassname, submitted=None +): + """ + Create a configuration for the given device. + + @param monitorname: The name of the monitor/collector the device + is a member of. + @type monitorname: str + @param deviceid: The ID of the device + @type deviceid: str + @param configclassname: The fully qualified name of the class that + will create the device configuration. + @type configclassname: str + @param submitted: timestamp of when the job was submitted + @type submitted: float + """ + buildDeviceConfig( + self.dmd, self.log, monitorname, deviceid, configclassname, submitted + ) + + +# NOTE: the buildDeviceConfig function exists so that it can be tested +# without having to handle Celery details in the unit tests. + + +def buildDeviceConfig( + dmd, log, monitorname, deviceid, configclassname, submitted +): + svcconfigclass = resolve(configclassname) + svcname = configclassname.rsplit(".", 1)[0] + store = _getStore() + key = DeviceKey(svcname, monitorname, deviceid) + + # record when this build starts + started = time() + + # Check whether this is an old job, i.e. job pending timeout. + # If it is an old job, skip it, manager already sent another one. + status = store.get_status(key) + device = dmd.Devices.findDeviceByIdExact(deviceid) + if device is None: + log.warn( + "cannot build config because device was not found " + "device=%s collector=%s service=%s submitted=%f", + key.device, + key.monitor, + key.service, + submitted, + ) + # Speculatively delete the config because this device may have been + # re-identified under a new ID so the config keyed by the old ID + # should be removed. + _delete_config(key, store, log) + return + + if _job_is_old(status, submitted, started, device, log): + return + + # If the status is Expired, another job is coming, so skip this job. + if isinstance(status, ConfigStatus.Expired): + log.warn( + "skipped this job because another job is coming " + "device=%s collector=%s service=%s submitted=%f", + key.device, + key.monitor, + key.service, + submitted, + ) + return + + # If the status is Pending, verify whether it's for this job, and if not, + # skip this job. + if isinstance(status, ConfigStatus.Pending): + s1 = int(submitted * 1000) + s2 = int(status.submitted * 1000) + if s1 != s2: + log.warn( + "skipped this job in favor of newer job " + "device=%s collector=%s service=%s submitted=%f", + key.device, + key.monitor, + key.service, + submitted, + ) + return + + # Change the configuration's status to 'building' to indicate that + # a config is now building. 
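+    # Marking the status as Building also lets _should_update_status detect,
+    # once the build completes, whether an Expired or Pending transition
+    # replaced this status while the configuration was being built.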
+ store.set_building((key, time())) + log.info( + "building device configuration device=%s collector=%s service=%s", + deviceid, + monitorname, + svcname, + ) + + service = svcconfigclass(dmd, monitorname) + method = getattr(service, "remote_getDeviceConfigs", None) + if method is None: + log.warn( + "config service does not have required API " + "device=%s collector=%s service=%s submitted=%f", + key.device, + key.monitor, + key.service, + submitted, + ) + # Services without a remote_getDeviceConfigs method can't create + # device configs, so delete the config that may exist. + _delete_config(key, store, log) + return + + result = service.remote_getDeviceConfigs((deviceid,)) + config = result[0] if result else None + + # get a new store; the prior store's connection may have gone stale. + store = _getStore() + if config is None: + log.info( + "no configuration built device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) + _delete_config(key, store, log) + else: + uid = device.getPrimaryId() + record = DeviceRecord.make( + svcname, monitorname, deviceid, uid, time(), config + ) + + # Get the current status of the configuration. + recent_status = store.get_status(key) + + # Test whether the status should be updated + update_status = _should_update_status( + recent_status, started, deviceid, monitorname, svcname, log + ) + + if not update_status: + # recent_status is not ConfigStatus.Building, so another job + # will be submitted or has already been submitted. + store.put_config(record) + log.info( + "saved config without changing status " + "updated=%s device=%s collector=%s service=%s", + datetime.fromtimestamp(record.updated).isoformat(), + deviceid, + monitorname, + svcname, + ) + else: + verb = "replaced" if status is not None else "added" + store.add(record) + log.info( + "%s config updated=%s device=%s collector=%s service=%s", + verb, + datetime.fromtimestamp(record.updated).isoformat(), + deviceid, + monitorname, + svcname, + ) + + +def _should_update_status( + recent_status, started, deviceid, monitorname, svcname, log +): + # Check for expected statuses. + if isinstance(recent_status, ConfigStatus.Building): + # The status is Building, so let's update the status. 
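+        # (no Expired or Pending transition replaced the Building status
+        # while the configuration was being built)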
+ return True + + if isinstance(recent_status, ConfigStatus.Expired): + update_status = bool(recent_status.expired < started) + if not update_status: + log.info( + "config expired while building config " + "device=%s collector=%s service=%s", + deviceid, + monitorname, + svcname, + ) + else: + log.warning( + "config status has inconsistent state status=Expired " + "expired=%s device=%s collector=%s service=%s", + datetime.fromtimestamp(recent_status.expired).isoformat(), + deviceid, + monitorname, + svcname, + ) + return update_status + + if isinstance(recent_status, ConfigStatus.Pending): + update_status = bool(recent_status.submitted < started) + if not update_status: + log.info( + "another job submitted while building config " + "device=%s collector=%s service=%s", + deviceid, + monitorname, + svcname, + ) + else: + log.warning( + "config status has inconsistent state status=Pending " + "submitted=%s device=%s collector=%s service=%s", + datetime.fromtimestamp(recent_status.submitted).isoformat(), + deviceid, + monitorname, + svcname, + ) + return update_status + + log.warning( + "Unexpected status change during config build " + "status=%s device=%s collector=%s service=%s", + type(recent_status).__name__, + deviceid, + monitorname, + svcname, + ) + return True + + +def _delete_config(key, store, log): + if key in store: + store.remove(key) + log.info( + "removed previously built configuration " + "device=%s collector=%s service=%s", + key.device, + key.monitor, + key.service, + ) + # Ensure all statuses for this key are deleted. + store.clear_status(key) + + +def _job_is_old(status, submitted, now, device, log): + if submitted is None or status is None: + # job is not old (default state) + return False + limit = DeviceProperties(device).pending_timeout + if submitted < (now - limit): + log.warn( + "skipped this job because it's too old " + "device=%s collector=%s service=%s submitted=%f %s=%s", + status.key.device, + status.key.monitor, + status.key.service, + submitted, + Constants.device_pending_timeout_id, + limit, + ) + return True + return False + + +def _getStore(): + client = getRedisClient(url=getRedisUrl()) + return createObject("deviceconfigcache-store", client) diff --git a/Products/ZenCollector/configcache/tasks/oidmap.py b/Products/ZenCollector/configcache/tasks/oidmap.py new file mode 100644 index 0000000000..f507359530 --- /dev/null +++ b/Products/ZenCollector/configcache/tasks/oidmap.py @@ -0,0 +1,197 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +import json + +from datetime import datetime +from time import time + +from hashlib import md5 + +from zope.component import createObject + +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from Products.Jobber.task import requires, DMD +from Products.Jobber.zenjobs import app + +from ..cache import OidMapRecord, ConfigStatus +from ..constants import Constants +from ..utils import OidMapProperties + + +@app.task( + bind=True, + base=requires(DMD), + name="configcache.build_oidmap", + summary="Create OID Map Task", + description_template="Create an OID map for zentrap.", + ignore_result=True, + dmd_read_only=True, +) +def build_oidmap(self, submitted=None): + """ + Create a map of SNMP OIDs. + + @param configclassname: The fully qualified name of the class that + will create the device configuration. + @type configclassname: str + @param submitted: timestamp of when the job was submitted + @type submitted: float + """ + buildOidMap(self.dmd, self.log, submitted) + + +# NOTE: the buildOidMap function exists so that it can be tested +# without having to handle Celery details in the unit tests. + + +def buildOidMap(dmd, log, submitted): + store = _getStore() + + # record when this build starts + started = time() + + # Check whether this is an old job, i.e. job pending timeout. + # If it is an old job, skip it, manager already sent another one. + status = store.get_status() + + if _job_is_old(status, submitted, started, log): + return + + # If the status is Expired, another job is coming, so skip this job. + if isinstance(status, ConfigStatus.Expired): + log.warn( + "skipped this job because another job is coming submitted=%f", + submitted, + ) + return + + # If the status is Pending, verify whether it's for this job, and if not, + # skip this job. + if isinstance(status, ConfigStatus.Pending): + s1 = int(submitted * 1000) + s2 = int(status.submitted * 1000) + if s1 != s2: + log.warn( + "skipped this job in favor of newer job submitted=%f", + submitted, + ) + return + + # Change the configuration's status to 'building' to indicate that + # a config is now building. + store.set_building(time()) + log.info("building oidmap") + + oidmap = {b.oid: b.id for b in dmd.Mibs.mibSearch() if b.oid} + + # get a new store; the prior store's connection may have gone stale. + store = _getStore() + + if not oidmap: + log.info("no oidmap was built") + _delete_oidmap(store, log) + return + + checksum = md5( # noqa: S324 + json.dumps(oidmap, sort_keys=True).encode("utf-8") + ).hexdigest() + + record = OidMapRecord.make(time(), checksum, oidmap) + + # Get the current status of the configuration. + recent_status = store.get_status() + + # Test whether the status should be updated + update_status = _should_update_status(recent_status, started, log) + + created_ts = datetime.fromtimestamp(record.created).isoformat() + if not update_status: + # recent_status is not ConfigStatus.Building, so another job + # will be submitted or has already been submitted. + store.put_config(record) + log.info( + "saved oidmap without changing status created=%s", created_ts + ) + else: + verb = "replaced" if status is not None else "added" + store.add(record) + log.info("%s oidmap created=%s", verb, created_ts) + + +def _should_update_status(recent_status, started, log): + # Check for expected statuses. 
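+    # Building is the expected status here; Expired or Pending means that
+    # another job raced this one while the oidmap was being built.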
+ if isinstance(recent_status, ConfigStatus.Building): + # The status is Building, so let's update the status. + return True + + if isinstance(recent_status, ConfigStatus.Expired): + update_status = bool(recent_status.expired < started) + if not update_status: + log.info("oidmap (re)expired while building new oidmap") + else: + log.warning( + "oidmap status has inconsistent state status=Expired " + "expired=%s", + datetime.fromtimestamp(recent_status.expired).isoformat(), + ) + return update_status + + if isinstance(recent_status, ConfigStatus.Pending): + update_status = bool(recent_status.submitted < started) + if not update_status: + log.info("another job submitted while building oidmap") + else: + log.warning( + "oidmap status has inconsistent state status=Pending " + "submitted=%s", + datetime.fromtimestamp(recent_status.submitted).isoformat(), + ) + return update_status + + log.warning( + "Unexpected status change during oidmap build status=%s", + type(recent_status).__name__, + ) + return True + + +def _delete_oidmap(store, log): + if not store: + return + store.remove() + log.info("removed previously built oidmap") + # Ensure all statuses for this key are deleted. + store.clear_status() + + +def _job_is_old(status, submitted, now, log): + if submitted is None or status is None: + # job is not old (default state) + return False + limit = OidMapProperties().pending_timeout + if submitted < (now - limit): + log.warn( + "skipped this job because it's too old " + "service=%s submitted=%f %s=%s", + status.key.service, + submitted, + Constants.oidmap_pending_timeout_id, + limit, + ) + return True + return False + + +def _getStore(): + client = getRedisClient(url=getRedisUrl()) + return createObject("oidmapcache-store", client) diff --git a/Products/ZenCollector/configcache/tests/__init__.py b/Products/ZenCollector/configcache/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenCollector/configcache/tests/test_build_device_config.py b/Products/ZenCollector/configcache/tests/test_build_device_config.py new file mode 100644 index 0000000000..6c757d3752 --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_build_device_config.py @@ -0,0 +1,132 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import mock + +from unittest import TestCase + +from Products.Jobber.tests.utils import RedisLayer +from Products.ZenCollector.services.config import DeviceProxy + +from ..cache import DeviceKey, DeviceRecord +from ..cache.storage import DeviceConfigStore +from ..tasks.deviceconfig import buildDeviceConfig + +PATH = { + "zenjobs": "Products.Jobber.zenjobs", + "task": "Products.ZenCollector.configcache.tasks.deviceconfig", +} + + +class TestBuildDeviceConfig(TestCase): + layer = RedisLayer + + def setUp(t): + t.device_name = "qadevice" + t.device_uid = "/zport/dmd/Devices/Server/Linux/devices/qadevice" + t.store = DeviceConfigStore(t.layer.redis) + + def tearDown(t): + del t.store + + @mock.patch("{task}.time".format(**PATH), autospec=True) + @mock.patch("{task}.createObject".format(**PATH), autospec=True) + @mock.patch("{task}.resolve".format(**PATH), autospec=True) + def test_no_config_built(t, _resolve, _createObject, _time): + monitor = "localhost" + clsname = "Products.ZenHub.services.PingService.PingService" + svcname = clsname.rsplit(".", 1)[0] + submitted = 123456.34 + svcclass = mock.Mock() + svc = mock.MagicMock() + dmd = mock.Mock() + log = mock.Mock() + dvc = mock.Mock() + key = DeviceKey(svcname, monitor, t.device_name) + + _createObject.return_value = t.store + _resolve.return_value = svcclass + svcclass.return_value = svc + svc.remote_getDeviceConfigs.return_value = [] + dmd.Devices.findDeviceByIdExact.return_value = dvc + dvc.getPrimaryId.return_value = t.device_uid + _time.return_value = submitted + 10 + dvc.getZ.return_value = 1000 + + t.store.set_pending((key, submitted)) + + buildDeviceConfig(dmd, log, monitor, t.device_name, clsname, submitted) + + status = t.store.get_status(key) + t.assertIsNone(status) + + @mock.patch("{task}.createObject".format(**PATH), autospec=True) + @mock.patch("{task}.resolve".format(**PATH), autospec=True) + def test_device_not_found(t, _resolve, _createObject): + monitor = "localhost" + clsname = "Products.ZenHub.services.PingService.PingService" + svcname = clsname.rsplit(".", 1)[0] + submitted = 123456.34 + svcclass = mock.Mock() + svc = mock.MagicMock() + dmd = mock.Mock() + log = mock.Mock() + key = DeviceKey(svcname, monitor, t.device_name) + + _createObject.return_value = t.store + _resolve.return_value = svcclass + svcclass.return_value = svc + dmd.Devices.findDeviceByIdExact.return_value = None + + t.store.set_pending((key, submitted)) + + buildDeviceConfig(dmd, log, monitor, t.device_name, clsname, submitted) + + status = t.store.get_status(key) + t.assertIsNone(status) + + @mock.patch("{task}.createObject".format(**PATH), autospec=True) + @mock.patch("{task}.resolve".format(**PATH), autospec=True) + def test_device_reidentified(t, _resolve, _createObject): + # A 're-identified' device will no longer be found in ZODB under its + # old ID, but a config keyed for the old ID will still exist. 
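+        # buildDeviceConfig is expected to remove the stale record and clear
+        # its status, which is what the final assertion verifies.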
+ monitor = "localhost" + clsname = "Products.ZenHub.services.PingService.PingService" + svcname = clsname.rsplit(".", 1)[0] + proxy = DeviceProxy() + submitted = 123456.34 + record = DeviceRecord.make( + svcname, + monitor, + t.device_name, + t.device_uid, + submitted - 300, + proxy, + ) + key = record.key + t.store.add(record) + + svcclass = mock.Mock() + svc = mock.MagicMock() + dmd = mock.Mock() + log = mock.Mock() + + _createObject.return_value = t.store + _resolve.return_value = svcclass + svcclass.return_value = svc + dmd.Devices.findDeviceByIdExact.return_value = None + + t.store.set_pending((key, submitted)) + + buildDeviceConfig(dmd, log, monitor, t.device_name, clsname, submitted) + + status = t.store.get_status(key) + t.assertIsNone(status) diff --git a/Products/ZenCollector/configcache/tests/test_deviceconfigstore.py b/Products/ZenCollector/configcache/tests/test_deviceconfigstore.py new file mode 100644 index 0000000000..dc3f3b263a --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_deviceconfigstore.py @@ -0,0 +1,1241 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections + +from unittest import TestCase + +import six + +from Products.ZenCollector.services.config import DeviceProxy +from Products.Jobber.tests.utils import subTest, RedisLayer + +from ..cache import DeviceKey, DeviceQuery, DeviceRecord, ConfigStatus +from ..cache.storage import DeviceConfigStore + + +class EmptyDeviceConfigStoreTest(TestCase): + """Test an empty DeviceConfigStore object.""" + + layer = RedisLayer + + def setUp(t): + t.store = DeviceConfigStore(t.layer.redis) + + def tearDown(t): + del t.store + + def test_search(t): + t.assertIsInstance(t.store.search(), collections.Iterable) + t.assertTupleEqual(tuple(t.store.search()), ()) + + def test_get_with_default_default(t): + key = DeviceKey("a", "b", "c") + t.assertIsNone(t.store.get(key)) + + def test_get_with_nondefault_default(t): + key = DeviceKey("a", "b", "c") + dflt = object() + t.assertEqual(t.store.get(key, dflt), dflt) + + def test_remove(t): + t.assertIsNone(t.store.remove()) + + def test_get_status_unknown_key(t): + key = DeviceKey("a", "b", "c") + result = t.store.get_status(key) + t.assertIsNone(result) + + def test_get_pending(t): + result = t.store.get_pending() + t.assertIsInstance(result, collections.Iterable) + t.assertTupleEqual(tuple(result), ()) + + def test_get_older(t): + result = t.store.get_older(1.0) + t.assertIsInstance(result, collections.Iterable) + t.assertTupleEqual(tuple(result), ()) + + def test_get_newer(t): + result = t.store.get_newer(1.0) + t.assertIsInstance(result, collections.Iterable) + t.assertTupleEqual(tuple(result), ()) + + def test_search_badarg(t): + with t.assertRaises(TypeError): + t.store.search("blargh") + + +class NoConfigTest(TestCase): + """Test statuses when no config is present.""" + + layer = RedisLayer + + key = DeviceKey("a", "b", "c") + now = 12345.0 + + def setUp(t): + t.store = DeviceConfigStore(t.layer.redis) + + def tearDown(t): + del t.store + + def test_current_status(t): + t.assertIsNone(t.store.get_status(t.key)) + + def test_search_with_status(t): + 
t.store.set_pending((t.key, t.now)) + t.assertEqual(0, len(tuple(t.store.search()))) + + def test_retired(t): + expected = ConfigStatus.Retired(t.key, t.now) + t.store.set_retired((t.key, t.now)) + status = t.store.get_status(t.key) + t.assertEqual(expected, status) + + def test_expired(t): + expected = ConfigStatus.Expired(t.key, t.now) + t.store.set_expired((t.key, t.now)) + status = t.store.get_status(t.key) + t.assertEqual(expected, status) + + def test_pending(t): + expected = ConfigStatus.Pending(t.key, t.now) + t.store.set_pending((t.key, t.now)) + status = t.store.get_status(t.key) + t.assertEqual(expected, status) + + def test_building(t): + expected = ConfigStatus.Building(t.key, t.now) + t.store.set_building((t.key, t.now)) + status = t.store.get_status(t.key) + t.assertEqual(expected, status) + + +_values = collections.namedtuple( + "_values", "service monitor device uid updated" +) + + +class _BaseTest(TestCase): + # Base class to share setup code + + layer = RedisLayer + + values = ( + _values("a", "b", "c1", "/c1", 1234500.0), + _values("a", "b", "c2", "/c2", 1234550.0), + ) + + def setUp(t): + DeviceProxy.__eq__ = _compare_configs + t.store = DeviceConfigStore(t.layer.redis) + t.config1 = _make_config("test1", "_test1", "abc-test-01") + t.config2 = _make_config("test2", "_test2", "abc-test-02") + t.record1 = DeviceRecord.make( + t.values[0].service, + t.values[0].monitor, + t.values[0].device, + t.values[0].uid, + t.values[0].updated, + t.config1, + ) + t.record2 = DeviceRecord.make( + t.values[1].service, + t.values[1].monitor, + t.values[1].device, + t.values[1].uid, + t.values[1].updated, + t.config2, + ) + + def tearDown(t): + del t.store + del t.config1 + del t.config2 + del t.record1 + del t.record2 + del DeviceProxy.__eq__ + + +class ConfigStoreAddTest(_BaseTest): + """Test the `add` method of DeviceConfigStore.""" + + def test_add_new_config(t): + t.store.add(t.record1) + t.store.add(t.record2) + expected1 = DeviceKey( + t.values[0].service, + t.values[0].monitor, + t.values[0].device, + ) + expected2 = DeviceKey( + t.values[1].service, + t.values[1].monitor, + t.values[1].device, + ) + result = tuple(t.store.search()) + t.assertEqual(2, len(result)) + t.assertIn(expected1, result) + t.assertIn(expected2, result) + + result = t.store.get(t.record1.key) + t.assertIsInstance(result, DeviceRecord) + t.assertEqual(t.record1, result) + + result = t.store.get(t.record2.key) + t.assertIsInstance(result, DeviceRecord) + t.assertEqual(t.record2, result) + + +class ConfigStoreSearchTest(_BaseTest): + """Test the `search` method of DeviceConfigStore.""" + + def test_negative_search(t): + t.store.add(t.record1) + cases = ( + {"service": "x"}, + {"service": "x", "monitor": "y"}, + {"service": "x", "monitor": "y", "device": "z"}, + {"monitor": "y"}, + {"monitor": "y", "device": "z"}, + {"device": "z"}, + ) + for case in cases: + with subTest(key=case): + result = tuple(t.store.search(DeviceQuery(**case))) + t.assertTupleEqual((), result) + + def test_positive_search_single(t): + t.store.add(t.record1) + f0 = t.values[0] + cases = ( + {"service": f0.service}, + {"service": f0.service, "monitor": f0.monitor}, + { + "service": f0.service, + "monitor": f0.monitor, + "device": f0.device, + }, + {"monitor": f0.monitor}, + {"monitor": f0.monitor, "device": f0.device}, + {"device": f0.device}, + ) + for case in cases: + with subTest(key=case): + result = tuple(t.store.search(DeviceQuery(**case))) + t.assertTupleEqual((t.record1.key,), result) + + def test_positive_search_multiple(t): + 
t.store.add(t.record1) + t.store.add(t.record2) + f0 = t.values[0] + cases = ( + ({"service": f0.service}, 2), + ({"service": f0.service, "monitor": f0.monitor}, 2), + ( + { + "service": f0.service, + "monitor": f0.monitor, + "device": f0.device, + }, + 1, + ), + ({"monitor": f0.monitor}, 2), + ({"monitor": f0.monitor, "device": f0.device}, 1), + ({"device": f0.device}, 1), + ) + for args, count in cases: + with subTest(key=args): + result = tuple(t.store.search(DeviceQuery(**args))) + t.assertEqual(count, len(result)) + + +class ConfigStoreGetStatusTest(_BaseTest): + """Test the `get_status` method of DeviceConfigStore.""" + + def test_get_status(t): + t.store.add(t.record1) + t.store.add(t.record2) + + status = t.store.get_status(t.record1.key) + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.values[0].updated, status.updated) + + status = t.store.get_status(t.record2.key) + t.assertEqual(t.record2.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.values[1].updated, status.updated) + + +class ConfigStoreGetOlderTest(_BaseTest): + """Test the `get_older` method of DeviceConfigStore.""" + + def test_get_older_less_single(t): + t.store.add(t.record1) + result = tuple(t.store.get_older(t.record1.updated - 1)) + t.assertEqual(0, len(result)) + + def test_get_older_less_multiple(t): + t.store.add(t.record1) + t.store.add(t.record2) + + result = tuple(t.store.get_older(t.record1.updated - 1)) + t.assertEqual(0, len(result)) + + result = tuple(t.store.get_older(t.record2.updated - 1)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertEqual(t.record1.updated, status.updated) + + def test_get_older_equal_single(t): + t.store.add(t.record1) + result = tuple(t.store.get_older(t.record1.updated)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + def test_get_older_equal_multiple(t): + t.store.add(t.record1) + t.store.add(t.record2) + + result = tuple(t.store.get_older(t.record1.updated)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + result = sorted( + t.store.get_older(t.record2.updated), key=lambda x: x.updated + ) + t.assertEqual(2, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + status = result[1] + t.assertEqual(t.record2.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record2.updated, status.updated) + + def test_get_older_greater_single(t): + t.store.add(t.record1) + result = tuple(t.store.get_older(t.record1.updated + 1)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + def test_get_older_greater_multiple(t): + t.store.add(t.record1) + t.store.add(t.record2) + + result = tuple(t.store.get_older(t.record1.updated + 1)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + 
result = sorted( + t.store.get_older(t.record2.updated + 1), key=lambda x: x.updated + ) + t.assertEqual(2, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + status = result[1] + t.assertEqual(t.record2.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record2.updated, status.updated) + + +class ConfigStoreGetNewerTest(_BaseTest): + """Test the `get_newer` method of DeviceConfigStore.""" + + def test_get_newer_less_single(t): + t.store.add(t.record1) + result = tuple(t.store.get_newer(t.record1.updated - 1)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + def test_get_newer_less_multiple(t): + t.store.add(t.record1) + t.store.add(t.record2) + + result = sorted( + t.store.get_newer(t.record1.updated - 1), key=lambda x: x.updated + ) + t.assertEqual(2, len(result)) + status = result[0] + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + status = result[1] + t.assertEqual(t.record2.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record2.updated, status.updated) + + def test_get_newer_equal_single(t): + t.store.add(t.record1) + result = tuple(t.store.get_newer(t.record1.updated)) + t.assertEqual(0, len(result)) + + def test_get_newer_equal_multiple(t): + t.store.add(t.record1) + t.store.add(t.record2) + + result = tuple(t.store.get_newer(t.record1.updated)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record2.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record2.updated, status.updated) + + def test_get_newer_greater_single(t): + t.store.add(t.record1) + result = tuple(t.store.get_newer(t.record1.updated + 1)) + t.assertEqual(0, len(result)) + + def test_get_newer_greater_multiple(t): + t.store.add(t.record1) + t.store.add(t.record2) + result = tuple(t.store.get_newer(t.record1.updated + 1)) + t.assertEqual(1, len(result)) + status = result[0] + t.assertEqual(t.record2.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record2.updated, status.updated) + + +class SetStatusOnceTest(_BaseTest): + """ + Test the behavior when a set_ method is called once. 
+ """ + + def test_retired_once(t): + ts = t.record1.updated + 100 + expected = ConfigStatus.Retired(t.record1.key, ts) + t.store.set_retired((t.record1.key, ts)) + + actual = next(t.store.get_retired(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + def test_expired_once(t): + ts = t.record1.updated + 100 + expected = ConfigStatus.Expired(t.record1.key, ts) + t.store.set_expired((t.record1.key, ts)) + + actual = next(t.store.get_expired(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + def test_pending_once(t): + ts = t.record1.updated + 100 + expected = ConfigStatus.Pending(t.record1.key, ts) + t.store.set_pending((t.record1.key, ts)) + + actual = next(t.store.get_pending(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + def test_building_once(t): + ts = t.record1.updated + 100 + expected = ConfigStatus.Building(t.record1.key, ts) + t.store.set_building((t.record1.key, ts)) + + actual = next(t.store.get_building(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + + +class SetStatusTwiceTest(_BaseTest): + """ + Test the behavior when a set_ method is called twice + with different timestamp values. 
+ """ + + def test_retired_twice(t): + ts1 = t.record1.updated + 100 + ts2 = t.record1.updated + 200 + expected = ConfigStatus.Retired(t.record1.key, ts2) + t.store.set_retired((t.record1.key, ts1)) + t.store.set_retired((t.record1.key, ts2)) + + actual = next(t.store.get_retired(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + def test_expired_twice(t): + ts1 = t.record1.updated + 100 + ts2 = t.record1.updated + 200 + expected = ConfigStatus.Expired(t.record1.key, ts2) + t.store.set_expired((t.record1.key, ts1)) + t.store.set_expired((t.record1.key, ts2)) + + actual = next(t.store.get_expired(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + def test_pending_twice(t): + ts1 = t.record1.updated + 100 + ts2 = t.record1.updated + 200 + expected = ConfigStatus.Pending(t.record1.key, ts2) + t.store.set_pending((t.record1.key, ts1)) + t.store.set_pending((t.record1.key, ts2)) + + actual = next(t.store.get_pending(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + def test_building_twice(t): + ts1 = t.record1.updated + 100 + ts2 = t.record1.updated + 200 + expected = ConfigStatus.Building(t.record1.key, ts2) + t.store.set_building((t.record1.key, ts1)) + t.store.set_building((t.record1.key, ts2)) + + actual = next(t.store.get_building(), None) + t.assertEqual(expected, actual) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + + +class TestCurrentOnlyMethods(_BaseTest): + """ + Verify that the get_older and get_newer methods work for Current status. 
+ """ + + def test_older_with_current(t): + t.store.add(t.record1) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Current) + + older = next(t.store.get_older(t.record1.updated), None) + t.assertEqual(status, older) + + def test_older_with_retired(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_retired((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Retired) + + older = next(t.store.get_older(t.record1.updated), None) + t.assertIsNone(older) + + def test_older_with_expired(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_expired((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Expired) + + older = next(t.store.get_older(t.record1.updated), None) + t.assertIsNone(older) + + def test_older_with_pending(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_pending((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Pending) + + older = next(t.store.get_older(t.record1.updated), None) + t.assertIsNone(older) + + def test_older_with_building(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_building((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Building) + + older = next(t.store.get_older(t.record1.updated), None) + t.assertIsNone(older) + + def test_newer_with_current(t): + t.store.add(t.record1) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Current) + + newer = next(t.store.get_newer(t.record1.updated - 1), None) + t.assertEqual(status, newer) + + def test_newer_with_retired(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_retired((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Retired) + + newer = next(t.store.get_newer(t.record1.updated - 1), None) + t.assertIsNone(newer) + + def test_newer_with_expired(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_expired((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Expired) + + newer = next(t.store.get_newer(t.record1.updated - 1), None) + t.assertIsNone(newer) + + def test_newer_with_pending(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_pending((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Pending) + + newer = next(t.store.get_newer(t.record1.updated - 1), None) + t.assertIsNone(newer) + + def test_newer_with_building(t): + t.store.add(t.record1) + ts = t.record1.updated + 500 + t.store.set_building((t.record1.key, ts)) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Building) + + newer = next(t.store.get_newer(t.record1.updated - 1), None) + t.assertIsNone(newer) + + +class GetStatusTest(_BaseTest): + """ + Verify that get_status returns all the statuses. 
+ """ + + def test_current(t): + t.store.add(t.record1) + expected = ConfigStatus.Current(t.record1.key, t.record1.updated) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + def test_retired(t): + t.store.add(t.record1) + ts = t.record1.updated + 100 + t.store.set_retired((t.record1.key, ts)) + expected = ConfigStatus.Retired(t.record1.key, ts) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + def test_expired(t): + t.store.add(t.record1) + ts = t.record1.updated + 200 + t.store.set_expired((t.record1.key, ts)) + expected = ConfigStatus.Expired(t.record1.key, ts) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + def test_pending(t): + t.store.add(t.record1) + ts = t.record1.updated + 300 + t.store.set_pending((t.record1.key, ts)) + expected = ConfigStatus.Pending(t.record1.key, ts) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + def test_building(t): + t.store.add(t.record1) + ts = t.record1.updated + 400 + t.store.set_building((t.record1.key, ts)) + expected = ConfigStatus.Building(t.record1.key, ts) + actual = t.store.get_status(t.record1.key) + t.assertEqual(expected, actual) + + +class TestClearStatus(_BaseTest): + """ + Test clearing the status. + """ + + def test_clear_from_current(t): + t.store.add(t.record1) + t.store.clear_status(t.record1.key) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Current) + + t.assertIsNone(next(t.store.get_retired(), None)) + t.assertIsNone(next(t.store.get_expired(), None)) + t.assertIsNone(next(t.store.get_pending(), None)) + t.assertIsNone(next(t.store.get_building(), None)) + + def test_clear_from_expired_to_current(t): + t.store.add(t.record1) + ts = t.record1.updated + 100 + t.store.set_expired((t.record1.key, ts)) + + t.store.clear_status(t.record1.key) + + status = t.store.get_status(t.record1.key) + t.assertIsInstance(status, ConfigStatus.Current) + + t.assertIsNone(next(t.store.get_retired(), None)) + t.assertIsNone(next(t.store.get_expired(), None)) + t.assertIsNone(next(t.store.get_pending(), None)) + t.assertIsNone(next(t.store.get_building(), None)) + + def test_clear_from_retired(t): + retired = t.record1.updated + 100 + t.store.set_retired((t.record1.key, retired)) + + t.store.clear_status(t.record1.key) + + t.assertIsNone(t.store.get_status(t.record1.key)) + t.assertIsNone(next(t.store.get_retired(), None)) + t.assertIsNone(next(t.store.get_expired(), None)) + t.assertIsNone(next(t.store.get_pending(), None)) + t.assertIsNone(next(t.store.get_building(), None)) + + def test_clear_from_expired(t): + ts = t.record1.updated + 100 + t.store.set_expired((t.record1.key, ts)) + + t.store.clear_status(t.record1.key) + + t.assertIsNone(t.store.get_status(t.record1.key)) + t.assertIsNone(next(t.store.get_retired(), None)) + t.assertIsNone(next(t.store.get_expired(), None)) + t.assertIsNone(next(t.store.get_pending(), None)) + t.assertIsNone(next(t.store.get_building(), None)) + + def test_clear_from_pending(t): + ts = t.record1.updated + 100 + t.store.set_pending((t.record1.key, ts)) + + t.store.clear_status(t.record1.key) + + t.assertIsNone(t.store.get_status(t.record1.key)) + t.assertIsNone(next(t.store.get_retired(), None)) + t.assertIsNone(next(t.store.get_expired(), None)) + t.assertIsNone(next(t.store.get_pending(), None)) + t.assertIsNone(next(t.store.get_building(), None)) + + def test_clear_from_building(t): + ts = t.record1.updated + 100 + 
t.store.set_building((t.record1.key, ts)) + + t.store.clear_status(t.record1.key) + + t.assertIsNone(t.store.get_status(t.record1.key)) + t.assertIsNone(next(t.store.get_retired(), None)) + t.assertIsNone(next(t.store.get_expired(), None)) + t.assertIsNone(next(t.store.get_pending(), None)) + t.assertIsNone(next(t.store.get_building(), None)) + + +class TestStatusChangesFromRetired(_BaseTest): + """ + Test changing the status of a config. + """ + + def test_retired_to_expired(t): + retired = t.record1.updated + 100 + t.store.set_retired((t.record1.key, retired)) + + expired = t.record1.updated + 300 + t.store.set_expired((t.record1.key, expired)) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Expired(t.record1.key, expired) + actual = next(t.store.get_expired(), None) + t.assertEqual(expected, actual) + + def test_retired_to_pending(t): + retired = t.record1.updated + 100 + t.store.set_retired((t.record1.key, retired)) + + pending = t.record1.updated + 300 + t.store.set_pending((t.record1.key, pending)) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Pending(t.record1.key, pending) + actual = next(t.store.get_pending(), None) + t.assertEqual(expected, actual) + + def test_retired_to_building(t): + retired = t.record1.updated + 100 + t.store.set_retired((t.record1.key, retired)) + + building = t.record1.updated + 300 + t.store.set_building((t.record1.key, building)) + + actual = next(t.store.get_retired(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Building(t.record1.key, building) + actual = next(t.store.get_building(), None) + t.assertEqual(expected, actual) + + +class TestStatusChangesFromExpired(_BaseTest): + """ + Test changing the status of a config. + """ + + def test_expired_to_retired(t): + expired = t.record1.updated + 100 + t.store.set_expired((t.record1.key, expired)) + + retired = t.record1.updated + 300 + t.store.set_retired((t.record1.key, retired)) + + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Retired(t.record1.key, retired) + actual = next(t.store.get_retired(), None) + t.assertEqual(expected, actual) + + def test_expired_to_pending(t): + expired = t.record1.updated + 100 + t.store.set_expired((t.record1.key, expired)) + + pending = t.record1.updated + 300 + t.store.set_pending((t.record1.key, pending)) + + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Pending(t.record1.key, pending) + actual = next(t.store.get_pending(), None) + t.assertEqual(expected, actual) + + def test_expired_to_building(t): + expired = t.record1.updated + 100 + t.store.set_expired((t.record1.key, expired)) + + building = t.record1.updated + 300 + t.store.set_building((t.record1.key, building)) + + actual = next(t.store.get_expired(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Building(t.record1.key, building) + actual = next(t.store.get_building(), None) + t.assertEqual(expected, actual) + + +class TestStatusChangesFromPending(_BaseTest): + """ + Test changing the status of a config. 
+ """ + + def test_pending_to_retired(t): + pending = t.record1.updated + 100 + t.store.set_pending((t.record1.key, pending)) + + retired = t.record1.updated + 300 + t.store.set_retired((t.record1.key, retired)) + + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Retired(t.record1.key, retired) + actual = next(t.store.get_retired(), None) + t.assertEqual(expected, actual) + + def test_pending_to_expired(t): + pending = t.record1.updated + 100 + t.store.set_pending((t.record1.key, pending)) + + expired = t.record1.updated + 300 + t.store.set_expired((t.record1.key, expired)) + + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Expired(t.record1.key, expired) + actual = next(t.store.get_expired(), None) + t.assertEqual(expected, actual) + + def test_pending_to_building(t): + pending = t.record1.updated + 100 + t.store.set_pending((t.record1.key, pending)) + + building = t.record1.updated + 300 + t.store.set_building((t.record1.key, building)) + + actual = next(t.store.get_pending(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Building(t.record1.key, building) + actual = next(t.store.get_building(), None) + t.assertEqual(expected, actual) + + +class TestStatusChangesFromBuilding(_BaseTest): + """ + Test changing the status of a config. + """ + + def test_building_to_retired(t): + building = t.record1.updated + 100 + t.store.set_building((t.record1.key, building)) + + retired = t.record1.updated + 300 + t.store.set_retired((t.record1.key, retired)) + + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Retired(t.record1.key, retired) + actual = next(t.store.get_retired(), None) + t.assertEqual(expected, actual) + + def test_building_to_expired(t): + building = t.record1.updated + 100 + t.store.set_building((t.record1.key, building)) + + expired = t.record1.updated + 300 + t.store.set_expired((t.record1.key, expired)) + + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Expired(t.record1.key, expired) + actual = next(t.store.get_expired(), None) + t.assertEqual(expected, actual) + + def test_building_to_pending(t): + building = t.record1.updated + 100 + t.store.set_building((t.record1.key, building)) + + pending = t.record1.updated + 300 + t.store.set_pending((t.record1.key, pending)) + + actual = next(t.store.get_building(), None) + t.assertIsNone(actual) + + expected = ConfigStatus.Pending(t.record1.key, pending) + actual = next(t.store.get_pending(), None) + t.assertEqual(expected, actual) + + +class TestAddTransitions(_BaseTest): + """ + Test status changes after adding a config. 
+ """ + + def test_add_overwrites_retired(t): + t.store.add(t.record1) + retired = t.record1.updated + 100 + t.store.set_retired((t.record1.key, retired)) + t.store.add(t.record1) + + retired_keys = tuple(t.store.get_retired()) + t.assertTupleEqual((), retired_keys) + + status = t.store.get_status(t.record1.key) + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + def test_add_overwrites_expired(t): + t.store.add(t.record1) + ts = t.record1.updated + 300 + t.store.set_expired((t.record1.key, ts)) + t.store.add(t.record1) + + expired_keys = tuple(t.store.get_expired()) + t.assertTupleEqual((), expired_keys) + + status = t.store.get_status(t.record1.key) + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + def test_add_overwrites_pending(t): + t.store.add(t.record1) + ts = t.record1.updated + 300 + submitted = t.record1.updated + 500 + t.store.set_expired((t.record1.key, ts)) + t.store.set_pending((t.record1.key, submitted)) + t.store.add(t.record1) + + expired_keys = tuple(t.store.get_expired()) + t.assertTupleEqual((), expired_keys) + + pending_keys = tuple(t.store.get_pending()) + t.assertTupleEqual((), pending_keys) + + status = t.store.get_status(t.record1.key) + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + def test_add_overwrites_building(t): + t.store.add(t.record1) + ts = t.record1.updated + 300 + started = t.record1.updated + 500 + t.store.set_expired((t.record1.key, ts)) + t.store.set_pending((t.record1.key, started - 100)) + t.store.set_building((t.record1.key, started)) + t.store.add(t.record1) + + expired_keys = tuple(t.store.get_expired()) + t.assertTupleEqual((), expired_keys) + + pending_keys = tuple(t.store.get_pending()) + t.assertTupleEqual((), pending_keys) + + building_keys = tuple(t.store.get_building()) + t.assertTupleEqual((), building_keys) + + status = t.store.get_status(t.record1.key) + t.assertEqual(t.record1.key, status.key) + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(t.record1.updated, status.updated) + + +class DeviceMonitorChangeTest(_BaseTest): + """ + Test when a device changes its monitor. 
+ """ + + def test_add_monitor_change(t): + t.store.add(t.record1) + newmonitor = "b2" + updated = t.record1.updated + 1000 + newrecord = DeviceRecord.make( + t.record1.service, + newmonitor, + t.record1.device, + t.record1.uid, + updated, + t.record1.config, + ) + t.store.add(newrecord) + + result = t.store.get(t.record1.key) + t.assertIsNone(result) + + result = t.store.get(newrecord.key) + t.assertEqual(newrecord, result) + + +class GetUIDsTest(_BaseTest): + def test_get_uids_missing(t): + result = t.store.get_uids(t.values[0].device, t.values[1].device) + t.assertIsInstance(result, collections.Iterator) + result = sorted(result) + t.assertEqual(len(result), 2) + r1, r2 = result + t.assertEqual(r1[0], t.values[0].device) + t.assertIsNone(r1[1]) + t.assertEqual(r2[0], t.values[1].device) + t.assertIsNone(r2[1]) + + def test_get_uids_stored(t): + t.store.add(t.record1) + t.store.add(t.record2) + result = t.store.get_uids(t.values[0].device, t.values[1].device) + result = sorted(result) + t.assertEqual(len(result), 2) + r1, r2 = result + t.assertEqual(r1[0], t.values[0].device) + t.assertEqual(r1[1], t.values[0].uid) + t.assertEqual(r2[0], t.values[1].device) + t.assertEqual(r2[1], t.values[1].uid) + + +class DeviceUIDTest(TestCase): + layer = RedisLayer + + def setUp(t): + t.device_name = "qadevice" + t.device_uid = "/zport/dmd/Devices/Server/Linux/devices/qadevice" + t.store = DeviceConfigStore(t.layer.redis) + t.config1 = _make_config("qadevice", "qadevice", "abc-test-01") + t.config2 = _make_config("qadevice", "qadevice", "abc-test-01") + t.record1 = DeviceRecord.make( + "snmp", + "localhost", + t.device_name, + t.device_uid, + 123456.23, + t.config1, + ) + t.record2 = DeviceRecord.make( + "ping", + "localhost", + t.device_name, + t.device_uid, + 123654.23, + t.config2, + ) + + def tearDown(t): + del t.store + del t.config1 + del t.config2 + del t.record1 + del t.record2 + + def test_uid(t): + t.store.add(t.record1) + t.store.add(t.record2) + + device_uid = t.store.get_uid(t.device_name) + t.assertEqual(t.device_uid, device_uid) + + records = tuple( + t.store.get(key) + for key in t.store.search(DeviceQuery(device=t.device_name)) + ) + + t.assertEqual(2, len(records)) + t.assertNotEqual(records[0], records[1]) + t.assertEqual(records[0].uid, records[1].uid) + t.assertEqual(t.device_uid, records[0].uid) + + def test_uid_after_one_removal(t): + t.store.add(t.record1) + t.store.add(t.record2) + t.store.remove(t.record1.key) + + actual = t.store.get_uid(t.device_name) + t.assertEqual(t.device_uid, actual) + + records = tuple( + t.store.get(key) + for key in t.store.search(DeviceQuery(device=t.device_name)) + ) + t.assertEqual(1, len(records)) + t.assertEqual(t.device_uid, records[0].uid) + + def test_uid_after_removing_all(t): + t.store.add(t.record1) + t.store.add(t.record2) + t.store.remove(t.record1.key, t.record2.key) + + records = tuple( + t.store.get(key) + for key in t.store.search(DeviceQuery(device=t.device_name)) + ) + t.assertEqual(0, len(records)) + t.assertIsNone(t.store.get_uid(t.device_name)) + + +def _make_config(_id, configId, guid): + config = DeviceProxy() + config.id = _id + config._config_id = configId + config._device_guid = guid + config.data = six.ensure_text("𝗳ӓꞥϲỷ") + return config + + +def _compare_configs(self, cfg): + # _compare_configs used to monkeypatch DeviceProxy + # to make equivalent instances equal. 
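+    # Only the identifying fields set by _make_config are compared; other
+    # DeviceProxy attributes (such as .data) are ignored.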
+ return all( + ( + self.id == cfg.id, + self._config_id == cfg._config_id, + self._device_guid == cfg._device_guid, + ) + ) diff --git a/Products/ZenCollector/configcache/tests/test_deviceupdatehandler.py b/Products/ZenCollector/configcache/tests/test_deviceupdatehandler.py new file mode 100644 index 0000000000..c2cf8fd65e --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_deviceupdatehandler.py @@ -0,0 +1,166 @@ + +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2019, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from unittest import TestCase + +from mock import Mock, patch + +from ..cache import DeviceKey, ConfigStatus +from ..cache.storage import DeviceConfigStore +from ..dispatcher import DeviceConfigTaskDispatcher +from ..handlers import DeviceUpdateHandler + + +PATH = {"src": "Products.ZenCollector.configcache.handlers"} + + +class DeviceUpdateHandlerTest(TestCase): + """Test the DeviceUpdateHandler object.""" + + def setUp(t): + t.store = Mock(DeviceConfigStore) + t.dispatcher = Mock(DeviceConfigTaskDispatcher) + t.dispatcher.service_names = ("ServiceA", "ServiceB") + t.log = Mock(logging.getLogger("zen")) + t.handler = DeviceUpdateHandler(t.log, t.store, t.dispatcher) + + def tearDown(t): + del t.handler + del t.log + del t.dispatcher + del t.store + + @patch("{src}.time".format(**PATH), autospec=True) + def test_current_to_expired(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'b', 'c2') + updated1 = 33330.0 + updated2 = 33331.0 + now = 34000.0 + _time.time.return_value = now + + status1 = ConfigStatus.Current(key1, updated1) + status2 = ConfigStatus.Current(key2, updated2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 100.0) + + t.store.set_retired.assert_called_with() + t.store.set_expired.assert_called_with((key2, now), (key1, now)) + + @patch("{src}.time".format(**PATH), autospec=True) + def test_current_to_retired(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'b', 'c2') + updated1 = 33330.0 + updated2 = 33331.0 + now = 34000.0 + _time.time.return_value = now + + status1 = ConfigStatus.Current(key1, updated1) + status2 = ConfigStatus.Current(key2, updated2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 1000.0) + + t.store.set_retired.assert_called_with((key2, now), (key1, now)) + t.store.set_expired.assert_called_with() + + @patch("{src}.time".format(**PATH), autospec=True) + def test_current_to_retired_and_expired(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'd', 'c2') + updated1 = 33330.0 + updated2 = 32331.0 + now = 34000.0 + _time.time.return_value = now + + status1 = ConfigStatus.Current(key1, updated1) + status2 = ConfigStatus.Current(key2, updated2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 1000.0) + + t.store.set_retired.assert_called_with((key1, now)) + t.store.set_expired.assert_called_with((key2, now)) + + @patch("{src}.time".format(**PATH), autospec=True) + def test_pending_to_expired(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'b', 'c2') + updated1 = 33330.0 + updated2 = 32331.0 + now = 34000.0 + 
_time.time.return_value = now + + status1 = ConfigStatus.Pending(key1, updated1) + status2 = ConfigStatus.Pending(key2, updated2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 1000.0) + + t.store.set_retired.assert_called_with() + t.store.set_expired.assert_called_with((key2, now), (key1, now)) + + @patch("{src}.time".format(**PATH), autospec=True) + def test_only_expired(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'b', 'c2') + expired1 = 33330.0 + expired2 = 33331.0 + _time.time.return_value = 34000.0 + + status1 = ConfigStatus.Expired(key1, expired1) + status2 = ConfigStatus.Expired(key2, expired2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 100.0) + + t.store.set_retired.assert_called_with() + t.store.set_expired.assert_called_with() + + @patch("{src}.time".format(**PATH), autospec=True) + def test_only_retired(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'b', 'c2') + expired1 = 33330.0 + expired2 = 33331.0 + _time.time.return_value = 34000.0 + + status1 = ConfigStatus.Retired(key1, expired1) + status2 = ConfigStatus.Retired(key2, expired2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 1000.0) + + t.store.set_retired.assert_called_with() + t.store.set_expired.assert_called_with() + + @patch("{src}.time".format(**PATH), autospec=True) + def test_only_building(t, _time): + key1 = DeviceKey('a', 'b', 'c1') + key2 = DeviceKey('a', 'b', 'c2') + expired1 = 33330.0 + expired2 = 33331.0 + _time.time.return_value = 34000.0 + now = 34000.0 + + status1 = ConfigStatus.Building(key1, expired1) + status2 = ConfigStatus.Building(key2, expired2) + t.store.get_status.side_effect = (status1, status2) + + t.handler((key1, key2), 1000.0) + + t.store.set_retired.assert_called_with() + t.store.set_expired.assert_called_with((key2, now), (key1, now)) diff --git a/Products/ZenCollector/configcache/tests/test_dispatcher.py b/Products/ZenCollector/configcache/tests/test_dispatcher.py new file mode 100644 index 0000000000..ed1104f28e --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_dispatcher.py @@ -0,0 +1,89 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2019, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
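The DeviceUpdateHandler tests above pin down a status-transition policy: configurations whose status is Current are retired when they are younger than the supplied threshold and expired when they are older, Pending and Building configurations are always expired, and Expired or Retired entries are left alone. The sketch below is a standalone illustration of that policy only; it is not the shipped handler (which lives in Products.ZenCollector.configcache.handlers), and the function, parameter, and namedtuple names are invented for the example.

import collections
import time

# Minimal stand-ins for the ConfigStatus variants the tests exercise.
Current = collections.namedtuple("Current", "key updated")
Retired = collections.namedtuple("Retired", "key retired")
Expired = collections.namedtuple("Expired", "key expired")
Pending = collections.namedtuple("Pending", "key submitted")
Building = collections.namedtuple("Building", "key started")


def apply_update_policy(store, keys, threshold):
    """Retire young Current configs; expire stale, Pending, and Building ones."""
    now = time.time()
    statuses = [store.get_status(key) for key in keys]
    retire = tuple(
        (s.key, now)
        for s in statuses
        if isinstance(s, Current) and (now - s.updated) < threshold
    )
    expire = tuple(
        (s.key, now)
        for s in statuses
        if (isinstance(s, Current) and (now - s.updated) >= threshold)
        or isinstance(s, (Pending, Building))
    )
    # Expired and Retired entries are untouched; both store calls are made
    # even when there is nothing to record, as the tests assert.
    store.set_retired(*retire)
    store.set_expired(*expire)

As in the test case, a mock.Mock(DeviceConfigStore) can stand in for the store argument when trying this out.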
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from unittest import TestCase + +from mock import call, patch + +from ..dispatcher import DeviceConfigTaskDispatcher, build_device_config + + +PATH = {"src": "Products.ZenCollector.configcache.dispatcher"} + + +class DeviceConfigTaskDispatcherTest(TestCase): + """Test the DeviceConfigTaskDispatcher object.""" + + def setUp(t): + t.class_a = type( + "a", (object,), {"__module__": "some.path.one", "__name__": "a"} + ) + t.class_a_name = ".".join((t.class_a.__module__, t.class_a.__name__)) + t.class_b = type( + "b", (object,), {"__module__": "some.path.two", "__name__": "b"} + ) + t.class_b_name = ".".join((t.class_b.__module__, t.class_b.__name__)) + + t.bctd = DeviceConfigTaskDispatcher((t.class_a, t.class_b)) + + @patch.object(build_device_config, "apply_async") + def test_dispatch_all(t, _apply_async): + timeout = 100.0 + soft = 100.0 + hard = 110.0 + submitted = 111.0 + monitor = "local" + device = "linux" + t.bctd.dispatch_all(monitor, device, timeout, submitted) + + _apply_async.assert_has_calls( + ( + call( + args=(monitor, device, t.class_a_name), + kwargs={"submitted": submitted}, + soft_time_limit=soft, + time_limit=hard, + ), + call( + args=(monitor, device, t.class_b_name), + kwargs={"submitted": submitted}, + soft_time_limit=soft, + time_limit=hard, + ), + ) + ) + + @patch.object(build_device_config, "apply_async") + def test_dispatch(t, _apply_async): + timeout = 100.0 + soft = 100.0 + hard = 110.0 + submitted = 111.0 + monitor = "local" + device = "linux" + svcname = t.class_a.__module__ + t.bctd.dispatch(svcname, monitor, device, timeout, submitted) + + _apply_async.assert_called_once_with( + args=(monitor, device, t.class_a_name), + kwargs={"submitted": submitted}, + soft_time_limit=soft, + time_limit=hard, + ) + + def test_dispatch_unknown_service(t): + timeout = 100.0 + monitor = "local" + device = "linux" + submitted = 1111.0 + + with t.assertRaises(ValueError): + t.bctd.dispatch("unknown", monitor, device, timeout, submitted) diff --git a/Products/ZenCollector/configcache/tests/test_oidmapstore.py b/Products/ZenCollector/configcache/tests/test_oidmapstore.py new file mode 100644 index 0000000000..0a1105ca42 --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_oidmapstore.py @@ -0,0 +1,161 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
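The dispatcher tests above verify the shape of each queued Celery call: one build_device_config task per configuration-service class, identified by its dotted module-plus-class name, with a hard time limit roughly ten percent above the soft limit (100.0 becomes 110.0). The sketch below reproduces that call shape under those assumptions; `submit` is a hypothetical stand-in for build_device_config.apply_async, and the ten-percent margin is inferred from the test values rather than read from the implementation.

def dispatch_all(submit, service_classes, monitor, device, timeout, submitted):
    """Queue one config-build task per registered service class."""
    soft = float(timeout)
    hard = soft * 1.1  # inferred ~10% headroom for cleanup after a soft kill
    for cls in service_classes:
        svcname = ".".join((cls.__module__, cls.__name__))
        submit(
            args=(monitor, device, svcname),
            kwargs={"submitted": submitted},
            soft_time_limit=soft,
            time_limit=hard,
        )


if __name__ == "__main__":
    class ServiceA(object):
        __module__ = "some.path.one"

    def fake_submit(**call):
        # Print the keyword arguments instead of queuing a real Celery task.
        print(sorted(call.items()))

    dispatch_all(fake_submit, [ServiceA], "local", "linux", 100.0, 111.0)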
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections +import json +import time + +from hashlib import md5 +from unittest import TestCase + +from Products.Jobber.tests.utils import subTest, RedisLayer + +from ..cache import OidMapRecord, ConfigStatus +from ..cache.storage import OidMapStore + + +class EmptyOidMapStoreTest(TestCase): + """Test an empty OidMapStore object.""" + + layer = RedisLayer + + def setUp(t): + t.store = OidMapStore(t.layer.redis) + + def tearDown(t): + del t.store + + def test_nonzero(t): + t.assertFalse(t.store) + + def test_remove(t): + t.assertIsNone(t.store.remove()) + + def test_get_created(t): + t.assertIsNone(t.store.get_created()) + + def test_get_checksum(t): + t.assertIsNone(t.store.get_checksum()) + + def test_get_status(t): + result = t.store.get_status() + t.assertIsNone(result) + + def test_get_with_default_default(t): + t.assertIsNone(t.store.get()) + + def test_get_with_nondefault_default(t): + dflt = object() + t.assertEqual(t.store.get(dflt), dflt) + + +class OidMapStoreTest(TestCase): + + layer = RedisLayer + + def setUp(t): + t.store = OidMapStore(t.layer.redis) + t.created = time.time() + t.oidmap = {"1.1.1": "foo"} + t.checksum = md5( # noqa: S324 + json.dumps(t.oidmap, sort_keys=True).encode("utf-8") + ).hexdigest() + + def tearDown(t): + del t.store + + def test_add(t): + record = OidMapRecord(t.created, t.checksum, t.oidmap) + t.store.add(record) + + actual_record = t.store.get() + t.assertEqual(record, actual_record) + t.assertEqual(t.created, t.store.get_created()) + t.assertEqual(t.checksum, t.store.get_checksum()) + t.assertTrue(t.store) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(status.updated, t.created) + + def test_add_with_prior_status(t): + now = time.time() + t.store.set_pending(now) + + record = OidMapRecord(t.created, t.checksum, t.oidmap) + t.store.add(record) + + actual_record = t.store.get() + t.assertEqual(record, actual_record) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(status.updated, t.created) + + def test_put_when_empty(t): + record = OidMapRecord(t.created, t.checksum, t.oidmap) + t.store.put(record) + + actual_record = t.store.get() + t.assertEqual(record, actual_record) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Current) + t.assertEqual(status.updated, t.created) + + def test_put_with_prior_status(t): + now = time.time() + t.store.set_pending(now) + + record = OidMapRecord(t.created, t.checksum, t.oidmap) + t.store.put(record) + + actual_record = t.store.get() + t.assertEqual(record, actual_record) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Pending) + t.assertEqual(status.submitted, now) + + def test_set_expired(t): + now = time.time() + t.store.set_expired(now) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Expired) + t.assertEqual(status.expired, now) + + def test_set_pending(t): + now = time.time() + t.store.set_pending(now) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Pending) + t.assertEqual(status.submitted, now) + + def test_set_building(t): + now = time.time() + t.store.set_building(now) + + status = t.store.get_status() + t.assertIsInstance(status, ConfigStatus.Building) + t.assertEqual(status.started, now) + + def test_remove(t): + record = OidMapRecord(t.created, 
t.checksum, t.oidmap) + t.store.add(record) + t.store.remove() + + t.assertIsNone(t.store.get()) + t.assertIsNone(t.store.get_status()) + t.assertIsNone(t.store.get_checksum()) + t.assertIsNone(t.store.get_created()) + t.assertFalse(t.store) diff --git a/Products/ZenCollector/configcache/tests/test_propertymap.py b/Products/ZenCollector/configcache/tests/test_propertymap.py new file mode 100644 index 0000000000..14442a69e3 --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_propertymap.py @@ -0,0 +1,80 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +from unittest import TestCase + +from ..propertymap import DevicePropertyMap + + +class EmptyDevicePropertyMapTest(TestCase): + """Test an empty DevicePropertyMap object.""" + + def setUp(t): + t.dpm = DevicePropertyMap({}, None) + + def tearDown(t): + del t.dpm + + def test_get(t): + t.assertIsNone(t.dpm.get("/zport/dmd/Devices")) + + def test_smallest_value(t): + t.assertIsNone(t.dpm.smallest_value()) + + +class DevicePropertyMapTest(TestCase): + """Test a DevicePropertyMap object.""" + + mapping = { + "/zport/dmd/Devices": 10, + "/zport/dmd/Devices/Server/Linux": 11, + "/zport/dmd/Devices/Server/SSH/Linux/devices/my-device": 12, + "/zport/dmd/Devices/vSphere": 13, + "/zport/dmd/Devices/Network": 14, + } + + _default = 15 + + def setUp(t): + t.dpm = DevicePropertyMap(t.mapping, t._default) + + def tearDown(t): + del t.dpm + + def test_minimal_match(t): + value = t.dpm.get("/zport/dmd/Devices/Server-stuff/devices/dev2") + t.assertEqual(10, value) + + def test_get_exact_match(t): + value = t.dpm.get( + "/zport/dmd/Devices/Server/SSH/Linux/devices/my-device" + ) + t.assertEqual(12, value) + + def test_get_best_match(t): + value = t.dpm.get("/zport/dmd/Devices/Server/Linux/devices/dev3") + t.assertEqual(11, value) + + def test_no_match(t): + value = t.dpm.get("/Devices") + t.assertEqual(t._default, value) + + def test_empty_string(t): + value = t.dpm.get("") + t.assertEqual(t._default, value) + + def test_smallest_value(t): + value = t.dpm.smallest_value() + t.assertEqual(10, value) + + def test_uid_is_None(t): + value = t.dpm.get(None) + t.assertEqual(t._default, value) diff --git a/Products/ZenCollector/configcache/tests/test_propertymap_makers.py b/Products/ZenCollector/configcache/tests/test_propertymap_makers.py new file mode 100644 index 0000000000..2b455442d4 --- /dev/null +++ b/Products/ZenCollector/configcache/tests/test_propertymap_makers.py @@ -0,0 +1,275 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
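The test_propertymap.py tests above define the lookup contract for DevicePropertyMap: the longest stored path that prefixes the requested uid supplies the value, anything else (including None or an empty uid) falls back to the default, and smallest_value() returns the minimum stored value. The following standalone sketch illustrates that contract only; it is not the shipped class, and the name PrefixValueMap is made up for the example.

class PrefixValueMap(object):
    """Longest-prefix lookup over organizer/device paths with a default."""

    def __init__(self, mapping, default):
        self._mapping = dict(mapping)
        self._default = default

    def get(self, uid):
        if not uid:
            return self._default
        matches = [
            (len(path), value)
            for path, value in self._mapping.items()
            if uid.startswith(path)
        ]
        # The most specific (longest) matching path wins.
        return max(matches)[1] if matches else self._default

    def smallest_value(self):
        return min(self._mapping.values()) if self._mapping else None


values = PrefixValueMap(
    {"/zport/dmd/Devices": 10, "/zport/dmd/Devices/Server/Linux": 11}, 15
)
assert values.get("/zport/dmd/Devices/Server/Linux/devices/dev3") == 11
assert values.get("/zport/dmd/Devices/Server/Cmd/devices/dev1") == 10
assert values.get("/Devices") == 15
assert values.smallest_value() == 10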
+# +############################################################################## + +from __future__ import absolute_import, print_function + +from Products.ZenTestCase.BaseTestCase import BaseTestCase + +from ..propertymap import DevicePropertyMap +from ..constants import Constants + + +class TestDevicePropertyMapTTLMakers(BaseTestCase): + ttl_overrides = { + "Server/Linux": 16320, + "Server/Linux/linux0": 68000, + "Power": 8000, + "Network": 32000, + } + + min_ttl_overrides = { + "Server/Linux/linux0": 300, + } + + def afterSetUp(t): + super(TestDevicePropertyMapTTLMakers, t).afterSetUp() + + t.dmd.Devices.createOrganizer("/Server/Linux") + t.dmd.Devices.createOrganizer("/Server/Cmd") + t.dmd.Devices.createOrganizer("/Network") + t.dmd.Devices.createOrganizer("/Power") + + t.dmd.Devices.Server.Linux.setZenProperty( + Constants.device_time_to_live_id, t.ttl_overrides["Server/Linux"] + ) + t.dmd.Devices.Power.setZenProperty( + Constants.device_time_to_live_id, t.ttl_overrides["Power"] + ) + t.dmd.Devices.Network.setZenProperty( + Constants.device_time_to_live_id, t.ttl_overrides["Network"] + ) + + t.linux_dev = t.dmd.Devices.Server.Linux.createInstance("linux0") + t.linux_dev.setZenProperty( + Constants.device_time_to_live_id, + t.ttl_overrides["Server/Linux/linux0"], + ) + t.linux_dev.setZenProperty( + Constants.device_minimum_time_to_live_id, + t.min_ttl_overrides["Server/Linux/linux0"], + ) + + t.cmd_dev = t.dmd.Devices.Server.Cmd.createInstance("cmd0") + + def test_make_ttl_map(t): + ttlmap = DevicePropertyMap.make_ttl_map(t.dmd.Devices) + + pathid = t.dmd.Devices.Server.getPrimaryId() + expected = Constants.device_time_to_live_value + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Server.Linux.getPrimaryId() + expected = t.ttl_overrides["Server/Linux"] + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Server.Cmd.getPrimaryId() + expected = Constants.device_time_to_live_value + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Power.getPrimaryId() + expected = t.ttl_overrides["Power"] + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Network.getPrimaryId() + expected = t.ttl_overrides["Network"] + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.linux_dev.getPrimaryId() + expected = t.ttl_overrides["Server/Linux/linux0"] + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.cmd_dev.getPrimaryId() + expected = Constants.device_time_to_live_value + actual = ttlmap.get(pathid) + t.assertEqual(expected, actual) + + def test_make_min_ttl_map(t): + minttlmap = DevicePropertyMap.make_minimum_ttl_map(t.dmd.Devices) + + pathid = t.dmd.Devices.Server.getPrimaryId() + expected = Constants.device_minimum_time_to_live_value + actual = minttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.linux_dev.getPrimaryId() + expected = t.min_ttl_overrides["Server/Linux/linux0"] + actual = minttlmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.cmd_dev.getPrimaryId() + expected = Constants.device_minimum_time_to_live_value + actual = minttlmap.get(pathid) + t.assertEqual(expected, actual) + + def test_large_min_ttl_value(t): + minttl_value = Constants.device_time_to_live_value + 100 + t.cmd_dev.setZenProperty( + Constants.device_minimum_time_to_live_id, minttl_value + ) + + minttlmap = DevicePropertyMap.make_minimum_ttl_map(t.dmd.Devices) + + pathid = 
t.cmd_dev.getPrimaryId() + expected = Constants.device_time_to_live_value + 100 + actual = minttlmap.get(pathid) + t.assertEqual(expected, actual) + + +class TestDevicePropertyMapPendingTimeout(BaseTestCase): + pending_overrides = { + "Server/Linux": 500, + "Server/Linux/linux0": 600, + "Power": 800, + "Network": 850, + } + + def afterSetUp(t): + super(TestDevicePropertyMapPendingTimeout, t).afterSetUp() + + t.dmd.Devices.createOrganizer("/Server/Linux") + t.dmd.Devices.createOrganizer("/Server/Cmd") + t.dmd.Devices.createOrganizer("/Network") + t.dmd.Devices.createOrganizer("/Power") + + t.dmd.Devices.Server.Linux.setZenProperty( + Constants.device_pending_timeout_id, + t.pending_overrides["Server/Linux"], + ) + t.dmd.Devices.Power.setZenProperty( + Constants.device_pending_timeout_id, t.pending_overrides["Power"] + ) + t.dmd.Devices.Network.setZenProperty( + Constants.device_pending_timeout_id, t.pending_overrides["Network"] + ) + + t.linux_dev = t.dmd.Devices.Server.Linux.createInstance("linux0") + t.linux_dev.setZenProperty( + Constants.device_pending_timeout_id, + t.pending_overrides["Server/Linux/linux0"], + ) + + t.cmd_dev = t.dmd.Devices.Server.Cmd.createInstance("cmd0") + + def test_make_pending_timeout_map(t): + pendingmap = DevicePropertyMap.make_pending_timeout_map(t.dmd.Devices) + + pathid = t.dmd.Devices.Server.getPrimaryId() + expected = Constants.device_pending_timeout_value + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Server.Linux.getPrimaryId() + expected = t.pending_overrides["Server/Linux"] + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Server.Cmd.getPrimaryId() + expected = Constants.device_pending_timeout_value + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Power.getPrimaryId() + expected = t.pending_overrides["Power"] + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Network.getPrimaryId() + expected = t.pending_overrides["Network"] + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.linux_dev.getPrimaryId() + expected = t.pending_overrides["Server/Linux/linux0"] + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.cmd_dev.getPrimaryId() + expected = Constants.device_pending_timeout_value + actual = pendingmap.get(pathid) + t.assertEqual(expected, actual) + + +class TestDevicePropertyMapBuildTimeout(BaseTestCase): + build_overrides = { + "Server/Linux": 500, + "Server/Linux/linux0": 600, + "Power": 800, + "Network": 850, + } + + def afterSetUp(t): + super(TestDevicePropertyMapBuildTimeout, t).afterSetUp() + + t.dmd.Devices.createOrganizer("/Server/Linux") + t.dmd.Devices.createOrganizer("/Server/Cmd") + t.dmd.Devices.createOrganizer("/Network") + t.dmd.Devices.createOrganizer("/Power") + + t.dmd.Devices.Server.Linux.setZenProperty( + Constants.device_build_timeout_id, + t.build_overrides["Server/Linux"], + ) + t.dmd.Devices.Power.setZenProperty( + Constants.device_build_timeout_id, t.build_overrides["Power"] + ) + t.dmd.Devices.Network.setZenProperty( + Constants.device_build_timeout_id, t.build_overrides["Network"] + ) + + t.linux_dev = t.dmd.Devices.Server.Linux.createInstance("linux0") + t.linux_dev.setZenProperty( + Constants.device_build_timeout_id, + t.build_overrides["Server/Linux/linux0"], + ) + + t.cmd_dev = t.dmd.Devices.Server.Cmd.createInstance("cmd0") + + def test_make_build_timeout_map(t): + buildmap = 
DevicePropertyMap.make_build_timeout_map(t.dmd.Devices) + + pathid = t.dmd.Devices.Server.getPrimaryId() + expected = Constants.device_build_timeout_value + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Server.Linux.getPrimaryId() + expected = t.build_overrides["Server/Linux"] + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Server.Cmd.getPrimaryId() + expected = Constants.device_build_timeout_value + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Power.getPrimaryId() + expected = t.build_overrides["Power"] + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.dmd.Devices.Network.getPrimaryId() + expected = t.build_overrides["Network"] + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.linux_dev.getPrimaryId() + expected = t.build_overrides["Server/Linux/linux0"] + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) + + pathid = t.cmd_dev.getPrimaryId() + expected = Constants.device_build_timeout_value + actual = buildmap.get(pathid) + t.assertEqual(expected, actual) diff --git a/Products/ZenCollector/configcache/utils/__init__.py b/Products/ZenCollector/configcache/utils/__init__.py new file mode 100644 index 0000000000..9511a52e76 --- /dev/null +++ b/Products/ZenCollector/configcache/utils/__init__.py @@ -0,0 +1,24 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .metrics import MetricReporter +from .pollers import RelStorageInvalidationPoller +from .services import getDeviceConfigServices +from .properties import DeviceProperties, OidMapProperties + + +__all__ = ( + "DeviceProperties", + "getDeviceConfigServices", + "MetricReporter", + "OidMapProperties", + "RelStorageInvalidationPoller", +) diff --git a/Products/ZenCollector/configcache/utils/metrics.py b/Products/ZenCollector/configcache/utils/metrics.py new file mode 100644 index 0000000000..82bae7882a --- /dev/null +++ b/Products/ZenCollector/configcache/utils/metrics.py @@ -0,0 +1,91 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
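The maker tests above only require that explicitly set zProperties appear in the map keyed by primary path, with everything else falling back to the Constants default. One plausible way to assemble such a mapping is a depth-first walk that records only locally set values; the sketch below is a guess at that shape, not the code behind make_ttl_map, and the Node structure and the property name zDeviceConfigTTL are invented for the example.

import collections

# Toy tree node; a real implementation would walk DeviceClass organizers
# and their devices instead of this namedtuple.
Node = collections.namedtuple("Node", "path local_properties children")


def collect_overrides(node, propname, mapping=None):
    """Record only locally set property values, keyed by the node's path."""
    if mapping is None:
        mapping = {}
    value = node.local_properties.get(propname)
    if value is not None:
        mapping[node.path] = value
    for child in node.children:
        collect_overrides(child, propname, mapping)
    return mapping


linux = Node(
    "/zport/dmd/Devices/Server/Linux", {"zDeviceConfigTTL": 16320}, ()
)
server = Node("/zport/dmd/Devices/Server", {}, (linux,))
devices = Node("/zport/dmd/Devices", {}, (server,))
print(collect_overrides(devices, "zDeviceConfigTTL"))
# -> {'/zport/dmd/Devices/Server/Linux': 16320}

Because lookups fall back to the nearest stored prefix, only explicit overrides need to be stored; inherited values are reproduced by the prefix match against the organizer path.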
+# +############################################################################## + +import json +import logging + +import requests + +from Products.ZenUtils.controlplane import configuration as cc_config +from Products.ZenUtils.MetricReporter import DEFAULT_METRIC_URL, getMetricData + +log = logging.getLogger("zen.configcache.metrics") + + +class MetricReporter(object): + + def __init__(self, url=None, prefix="", tags=None): + if not url: + url = cc_config.consumer_url + if not url: + url = DEFAULT_METRIC_URL + self._url = url + self._prefix = prefix + tags = dict(tags if tags is not None else {}) + tags.update( + { + "serviceId": cc_config.service_id, + "instance": cc_config.instance_id, + "hostId": cc_config.host_id, + "tenantId": cc_config.tenant_id, + } + ) + self._tags = tags + self._session = None + self._instruments = {} + + def __contains__(self, name): + """Return True if `name` matches a registered metric.""" + return name in self._instruments + + def add_tags(self, tags): + self._tags.update(tags) + + def register(self, name, instrument): + self._instruments[name] = instrument + + def save(self, name=None): + metrics = list( + self._get_metrics( + self._instruments.keys() if name is None else (name,) + ) + ) + if not metrics: + return + self._post_metrics(metrics) + + def _post_metrics(self, metrics): + if self._session is None: + self._session = requests.Session() + self._session.headers.update( + { + "Content-Type": "application/json", + "User-Agent": "Zenoss Service Metrics", + } + ) + post_data = {"metrics": metrics} + log.debug("sending metric payload: %s", post_data) + response = self._session.post(self._url, data=json.dumps(post_data)) + if response.status_code != 200: + log.warning( + "problem submitting metrics: %d, %s", + response.status_code, + response.text.replace("\n", "\\n"), + ) + self._session = None + else: + log.debug("%d metrics posted", len(metrics)) + + def _get_metrics(self, names): + for name in names: + instrument = self._instruments.get(name) + data = getMetricData(instrument, name, self._tags, self._prefix) + if data: + for metric in data: + yield metric diff --git a/Products/ZenCollector/configcache/utils/pollers.py b/Products/ZenCollector/configcache/utils/pollers.py new file mode 100644 index 0000000000..db2501e41f --- /dev/null +++ b/Products/ZenCollector/configcache/utils/pollers.py @@ -0,0 +1,81 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +import logging + +from itertools import chain + +from zope.component import getUtilitiesFor + +from Products.ZenHub.interfaces import IInvalidationFilter + +from ..modelchange import InvalidationProcessor + +log = logging.getLogger("zen.configcache") + + +class RelStorageInvalidationPoller(object): + """ + Wraps a :class:`relstorage.storage.RelStorage` object to provide an + API to return the latest database invalidations. + """ + + def __init__(self, storage, dmd): + """ + Initialize a RelStorageInvalidationPoller instance. 
+ + :param storage: relstorage storage object + :type storage: :class:`relstorage.storage.RelStorage` + """ + self.__storage = storage + app = dmd.getPhysicalRoot() + filters = initialize_invalidation_filters(dmd) + self.__processor = InvalidationProcessor(app, filters) + + def poll(self): + """ + Return an iterable of ZODB objects that have changed since the last + time `poll` was called. + + :rtype: Iterable[ZODB object] + """ + oids = self.__storage.poll_invalidations() + if not oids: + return () + return set( + chain.from_iterable(self.__processor.apply(oid) for oid in oids) + ) + + +def initialize_invalidation_filters(ctx): + """ + Return initialized IInvalidationFilter objects in a list. + + :param ctx: Used to initialize the IInvalidationFilter objects. + :type ctx: DataRoot + :return: Initialized IInvalidationFilter objects + :rtype: List[IInvalidationFilter] + """ + try: + filters = (f for n, f in getUtilitiesFor(IInvalidationFilter)) + invalidation_filters = [] + for fltr in sorted(filters, key=lambda f: getattr(f, "weight", 100)): + fltr.initialize(ctx) + invalidation_filters.append(fltr) + for fltr in invalidation_filters: + log.info( + "using invalidation filter %s.%s", + fltr.__module__, + fltr.__class__.__name__ + ) + return invalidation_filters + except Exception: + log.exception("error in initialize_invalidation_filters") diff --git a/Products/ZenCollector/configcache/utils/properties.py b/Products/ZenCollector/configcache/utils/properties.py new file mode 100644 index 0000000000..85dd56dafa --- /dev/null +++ b/Products/ZenCollector/configcache/utils/properties.py @@ -0,0 +1,82 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
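initialize_invalidation_filters above orders the registered IInvalidationFilter utilities by an optional weight attribute, treating a missing weight as 100, before initializing each one. A standalone illustration of that ordering rule follows; the filter class names are made up for the example.

class OrganizerFilter(object):
    weight = 10


class IgnoreUnmonitoredFilter(object):
    weight = 200


class NoWeightFilter(object):
    pass  # no weight attribute, so it sorts as if weight == 100


filters = [IgnoreUnmonitoredFilter(), NoWeightFilter(), OrganizerFilter()]
ordered = sorted(filters, key=lambda f: getattr(f, "weight", 100))
print([type(f).__name__ for f in ordered])
# -> ['OrganizerFilter', 'NoWeightFilter', 'IgnoreUnmonitoredFilter']

Lower weights sort first, so filters without an explicit weight land in the middle of the initialization order.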
+# +############################################################################## + +from Products.ZenUtils.GlobalConfig import getGlobalConfiguration + +from ..constants import Constants + + +class OidMapProperties(object): + def __init__(self): + self._conf = getGlobalConfiguration() + + @property + def ttl(self): + return self._conf.getint( + Constants.oidmap_time_to_live_id, + Constants.oidmap_time_to_live_value, + ) + + @property + def pending_timeout(self): + return self._conf.getint( + Constants.oidmap_pending_timeout_id, + Constants.oidmap_pending_timeout_value, + ) + + @property + def build_timeout(self): + return self._conf.getint( + Constants.oidmap_build_timeout_id, + Constants.oidmap_build_timeout_value, + ) + + +class DeviceProperties(object): + def __init__(self, device): + self._device = device + + @property + def ttl(self): + return _getZProperty( + self._device, + Constants.device_time_to_live_id, + Constants.device_time_to_live_value, + ) + + @property + def minimum_ttl(self): + return _getZProperty( + self._device, + Constants.device_minimum_time_to_live_id, + Constants.device_minimum_time_to_live_value, + ) + + @property + def pending_timeout(self): + return _getZProperty( + self._device, + Constants.device_pending_timeout_id, + Constants.device_pending_timeout_value, + ) + + @property + def build_timeout(self): + return _getZProperty( + self._device, + Constants.device_build_timeout_id, + Constants.device_build_timeout_value, + ) + + +def _getZProperty(obj, propname, default): + value = obj.getZ(propname) + if value is None: + return default + return value diff --git a/Products/ZenCollector/configcache/utils/services.py b/Products/ZenCollector/configcache/utils/services.py new file mode 100644 index 0000000000..06988bec0e --- /dev/null +++ b/Products/ZenCollector/configcache/utils/services.py @@ -0,0 +1,118 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +import importlib +import inspect +import itertools +import pathlib2 as pathlib + +import Products + +from Products.ZenCollector.services.config import CollectorConfigService + +_excluded_config_classes = ( + "NullConfigService", + "NullConfig", + "SnmpTrapConfig", +) + + +def mod_from_path(path): + """ + Returns the module path of the given path to a Python code file. + + The module path is the path to a file with a ".py" extension. + The package path is rooted at "Products" or "ZenPacks". + + >>> mod_from_path("/opt/zenoss/Products/ZenHub/services/ProcessConfig.py") + Products.ZenHub.services.ProcessConfig + + :param path: The module path + :type path: pathlib.Path + :returns: The package path + :rtype: pathlib.Path + """ + rpath = path.parts[::-1] # reverse the path + if "Products" in rpath: + offset = rpath.index("Products") + elif "ZenPacks" in rpath: + offset = rpath.index("ZenPacks") + return ".".join(itertools.chain(rpath[1 : offset + 1][::-1], [path.stem])) + + +def getConfigServicesFromModule(name): + """ + Returns a tuple containing all the config service classes in the module. + An empty tuple is returned if no config service classes are found. + + :param name: The full name of the module. 
+ :type name: pathlib.Path + :returns: Tuple of Configuration service classes + :rtype: tuple[CollectorConfigService] + """ + try: + mod = importlib.import_module(name) + classes = ( + cls + for nm, cls in inspect.getmembers(mod, inspect.isclass) + if cls.__module__ == mod.__name__ + ) + # CollectorConfigService is excluded because it is the base + # class for all other configuration services and not used + # directly by any collection daemon. + return tuple( + cls + for cls in classes + if cls is not CollectorConfigService + and issubclass(cls, CollectorConfigService) + ) + except ImportError: + return () + + +def getDeviceConfigServices(): + """ + Returns a tuple containing all the installed config service classes. + An empty tuple is returned if no config service classes are found. + + Configuration service classes are expected to be found in modules + that found in a package named "services". The "services" package can + be found in multiple package paths. + + :returns: Tuple of configuration service classes + :rtype: tuple[CollectorConfigService] + """ + # defer import ZenPacks until here because it doesn't exist during + # an image build. + import ZenPacks + + search_paths = ( + pathlib.Path(p) + for p in itertools.chain(Products.__path__, ZenPacks.__path__) + ) + service_paths = ( + svcpath + for path in search_paths + for svcpath in path.rglob("**/services") + ) + module_names = ( + mod_from_path(codepath) + for path in service_paths + for codepath in path.rglob("*.py") + if codepath.stem != "__init__" and "tests" not in codepath.parts + ) + return tuple( + cls + for cls in itertools.chain.from_iterable( + getConfigServicesFromModule(name) for name in module_names + ) + if cls.__name__ not in _excluded_config_classes + ) diff --git a/Products/ZenCollector/configcache/version.py b/Products/ZenCollector/configcache/version.py new file mode 100644 index 0000000000..88ca314144 --- /dev/null +++ b/Products/ZenCollector/configcache/version.py @@ -0,0 +1,39 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
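getDeviceConfigServices above discovers configuration-service classes by globbing services packages under Products and ZenPacks and importing each module by the dotted name mod_from_path derives from its file path. The sketch below re-implements that path-to-module rule with plain string handling so it can run without pathlib2 or the Zenoss tree; it mirrors the docstring example and keeps the original's choice of the last Products/ZenPacks component as the package root, but it is only an illustration, not the shipped helper.

def module_name_from_path(path):
    """Turn .../Products/ZenHub/services/ProcessConfig.py into a module path."""
    parts = [p for p in path.split("/") if p]
    for root in ("Products", "ZenPacks"):
        if root in parts:
            # Match mod_from_path: anchor on the *last* occurrence of the root.
            start = len(parts) - 1 - parts[::-1].index(root)
            stem = parts[-1]
            if stem.endswith(".py"):
                stem = stem[:-3]
            return ".".join(parts[start:-1] + [stem])
    raise ValueError("path is not rooted at Products or ZenPacks: %r" % path)


print(module_name_from_path(
    "/opt/zenoss/Products/ZenHub/services/ProcessConfig.py"
))
# -> Products.ZenHub.services.ProcessConfig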
+# +############################################################################## + +import sys as _sys + +from .app.args import get_subparser + + +class Version(object): + + description = "Display the version and exit" + + @staticmethod + def add_arguments(parser, subparsers): + subp_version = get_subparser( + subparsers, "version", description=Version.description + ) + subp_version.set_defaults(factory=Version) + + def __init__(self, args): + pass + + def run(self): + from Products.ZenModel.ZenossInfo import ZenossInfo + + zinfo = ZenossInfo("") + version = zinfo.getZenossVersion().short() + print("{} {}".format(_app_name(), version)) + + +def _app_name(): + fn = _sys.argv[0].rsplit("/", 1)[-1] + return fn.rsplit(".", 1)[0] if fn.endswith(".py") else fn diff --git a/Products/ZenCollector/configure.zcml b/Products/ZenCollector/configure.zcml index 7118ff77ed..5e4a6fb977 100644 --- a/Products/ZenCollector/configure.zcml +++ b/Products/ZenCollector/configure.zcml @@ -1,11 +1,24 @@ + + - + + + - + diff --git a/Products/ZenCollector/cyberark.py b/Products/ZenCollector/cyberark.py index 19f1a09ed8..0791220011 100644 --- a/Products/ZenCollector/cyberark.py +++ b/Products/ZenCollector/cyberark.py @@ -13,6 +13,7 @@ import json import logging import os +import re import urlparse from twisted.internet import defer, reactor, ssl @@ -22,9 +23,9 @@ from zope.component import queryUtility from Products.ZenEvents import Event +from Products.ZenHub.interfaces import IEventService from Products.ZenUtils.GlobalConfig import getGlobalConfiguration -from .interfaces import IEventService from .ExpiringCache import ExpiringCache _CFG_URL = "cyberark-url" @@ -435,10 +436,18 @@ def request(self, query): defer.returnValue((response.code, result)) +_cert_pattern = re.compile( + r"(-{5}BEGIN CERTIFICATE-{5}.+?-{5}END CERTIFICATE-{5})", + re.MULTILINE | re.DOTALL +) + + def load_certificates(url, cert_path): hostname = unicode(urlparse.urlsplit(url).hostname) - authority = ssl.Certificate.loadPEM( - FilePath(os.path.join(cert_path, "RootCA.crt")).getContent() + cert_data = FilePath(os.path.join(cert_path, "RootCA.crt")).getContent() + authorities = ssl.trustRootFromCertificates( + ssl.Certificate.loadPEM(m.group()) + for m in _cert_pattern.finditer(cert_data) ) client_cert = FilePath(os.path.join(cert_path, "client.crt")).getContent() client_key = FilePath(os.path.join(cert_path, "client.pem")).getContent() @@ -447,7 +456,7 @@ def load_certificates(url, cert_path): ) return ssl.optionsForClientTLS( hostname, - trustRoot=authority, + trustRoot=authorities, clientCertificate=client_certificate, ) diff --git a/Products/ZenCollector/daemon.py b/Products/ZenCollector/daemon.py index 2533ad3489..e5e5223d84 100644 --- a/Products/ZenCollector/daemon.py +++ b/Products/ZenCollector/daemon.py @@ -7,324 +7,243 @@ # ############################################################################## -import signal -import time -import logging +import itertools import json import re +import signal +import time from optparse import SUPPRESS_HELP -import zope.interface +import attr from metrology import Metrology from metrology.instruments import Gauge from twisted.internet import defer, reactor, task -from twisted.python.failure import Failure -from zope.component import getUtilitiesFor +from zope.component import ( + getUtilitiesFor, + provideUtility, + queryUtility, + getUtility, +) +from zope.interface import implementer + +import Products.ZenCollector as ZENCOLLECTOR_MODULE -from Products.ZenHub.PBDaemon import PBDaemon, FakeRemote 
from Products.ZenRRD.RRDDaemon import RRDDaemon from Products.ZenUtils import metrics from Products.ZenUtils.deprecated import deprecated from Products.ZenUtils.observable import ObservableProxy -from Products.ZenUtils.picklezipper import Zipper -from Products.ZenUtils.Utils import importClass, unused +from Products.ZenUtils.Utils import load_config +from .config import ( + ConfigurationLoaderTask, + ManyDeviceConfigLoader, + SingleDeviceConfigLoader, +) from .interfaces import ( ICollector, ICollectorPreferences, IConfigurationDispatchingFilter, IConfigurationListener, IDataService, - IEventService, IFrameworkFactory, - IStatistic, IStatisticsService, ITaskSplitter, ) -from .utils.maintenance import MaintenanceCycle - -log = logging.getLogger("zen.daemon") - - -@zope.interface.implementer(IConfigurationListener) -class DummyListener(object): - def deleted(self, configurationId): - """ - Called when a configuration is deleted from the collector - """ - log.debug("DummyListener: configuration %s deleted", configurationId) - - def added(self, configuration): - """ - Called when a configuration is added to the collector - """ - log.debug("DummyListener: configuration %s added", configuration) - - def updated(self, newConfiguration): - """ - Called when a configuration is updated in collector - """ - log.debug("DummyListener: configuration %s updated", newConfiguration) - - -@zope.interface.implementer(IConfigurationListener) -class ConfigListenerNotifier(object): - - _listeners = [] - - def addListener(self, listener): - self._listeners.append(listener) - - def deleted(self, configurationId): - """ - Called when a configuration is deleted from the collector - """ - for listener in self._listeners: - listener.deleted(configurationId) - - def added(self, configuration): - """ - Called when a configuration is added to the collector - """ - for listener in self._listeners: - listener.added(configuration) - - def updated(self, newConfiguration): - """ - Called when a configuration is updated in collector - """ - for listener in self._listeners: - listener.updated(newConfiguration) - - -@zope.interface.implementer(IConfigurationListener) -class DeviceGuidListener(object): - def __init__(self, daemon): - self._daemon = daemon - - def deleted(self, configurationId): - """ - Called when a configuration is deleted from the collector - """ - self._daemon._deviceGuids.pop(configurationId, None) - - def added(self, configuration): - """ - Called when a configuration is added to the collector - """ - deviceGuid = getattr(configuration, "deviceGuid", None) - if deviceGuid: - self._daemon._deviceGuids[configuration.id] = deviceGuid - - def updated(self, newConfiguration): - """ - Called when a configuration is updated in collector - """ - deviceGuid = getattr(newConfiguration, "deviceGuid", None) - if deviceGuid: - self._daemon._deviceGuids[newConfiguration.id] = deviceGuid - +from .listeners import ConfigListenerNotifier +from .utils.maintenance import MaintenanceCycle, ZenHubHeartbeatSender -DUMMY_LISTENER = DummyListener() -CONFIG_LOADER_NAME = "configLoader" - -@zope.interface.implementer(ICollector, IDataService, IEventService) +@implementer(ICollector, IDataService) class CollectorDaemon(RRDDaemon): - """ - The daemon class for the entire ZenCollector framework. This class bridges - the gap between the older daemon framework and ZenCollector. New collectors - no longer should extend this class to implement a new collector. 
- """ + """The daemon class for the entire ZenCollector framework.""" - _frameworkFactoryName = "" + _frameworkFactoryName = "default" # type: str + """Identifies the IFrameworkFactory implementation to use.""" - @property - def preferences(self): - """ - Preferences for this daemon - """ - return self._prefs + _cacheServiceName = "Products.ZenCollector.services.ConfigCache" + initialServices = RRDDaemon.initialServices + [_cacheServiceName] def __init__( self, preferences, taskSplitter, - configurationListener=DUMMY_LISTENER, + configurationListener=None, initializationCallback=None, stoppingCallback=None, ): """ - Constructs a new instance of the CollectorDaemon framework. Normally - only a singleton instance of a CollectorDaemon should exist within a - process, but this is not enforced. - - @param preferences: the collector configuration - @type preferences: ICollectorPreferences - @param taskSplitter: the task splitter to use for this collector - @type taskSplitter: ITaskSplitter - @param initializationCallback: a callable that will be executed after - connection to the hub but before - retrieving configuration information - @type initializationCallback: any callable - @param stoppingCallback: a callable that will be executed first during - the stopping process. Exceptions will be - logged but otherwise ignored. - @type stoppingCallback: any callable - """ - # create the configuration first, so we have the collector name - # available before activating the rest of the Daemon class hierarchy. - if not ICollectorPreferences.providedBy(preferences): - raise TypeError("configuration must provide ICollectorPreferences") - else: - self._prefs = ObservableProxy(preferences) - self._prefs.attachAttributeObserver( - "configCycleInterval", self._rescheduleConfig - ) - - if not ITaskSplitter.providedBy(taskSplitter): - raise TypeError("taskSplitter must provide ITaskSplitter") - else: - self._taskSplitter = taskSplitter - - if not IConfigurationListener.providedBy(configurationListener): - raise TypeError( - "configurationListener must provide IConfigurationListener" - ) + Initializes a CollectorDaemon instance. + + :param preferences: the collector configuration + :type preferences: ICollectorPreferences + :param taskSplitter: the task splitter to use for this collector + :type taskSplitter: ITaskSplitter + :param configurationListener: A listener that can react to + notifications on configuration changes. + :type configurationListener: IConfigurationListener + :param initializationCallback: a callable that will be executed after + connection to the hub but before retrieving configuration + information. + :type initializationCallback: any callable, optional + :param stoppingCallback: a callable that will be executed first during + the stopping process. Exceptions will be logged but otherwise + ignored. 
+ :type stoppingCallback: any callable, optional + """ + _verify_input_args(preferences, taskSplitter, configurationListener) + + self._prefs = ObservableProxy(preferences) + self._prefs.attachAttributeObserver( + "configCycleInterval", self._reschedule_configcycle + ) + self._taskSplitter = taskSplitter self._configListener = ConfigListenerNotifier() - self._configListener.addListener(configurationListener) - self._configListener.addListener(DeviceGuidListener(self)) + if configurationListener is not None: + self._configListener.addListener(configurationListener) self._initializationCallback = initializationCallback self._stoppingCallback = stoppingCallback - # register the various interfaces we provide the rest of the system so + # Register the various interfaces we provide the rest of the system so # that collector implementors can easily retrieve a reference back here # if needed - zope.component.provideUtility(self, ICollector) - zope.component.provideUtility(self, IEventService) - zope.component.provideUtility(self, IDataService) + provideUtility(self, ICollector) + provideUtility(self, IDataService) - # register the collector's own preferences object so it may be easily + # Register the collector's own preferences object so it may be easily # retrieved by factories, tasks, etc. - zope.component.provideUtility( + provideUtility( self.preferences, ICollectorPreferences, self.preferences.collectorName, ) + # There's only one preferences object, so also register an + # anonymous ICollectorPreferences utility. + provideUtility( + self.preferences, + ICollectorPreferences, + ) super(CollectorDaemon, self).__init__( name=self.preferences.collectorName ) - self._statService = StatisticsService() - zope.component.provideUtility(self._statService, IStatisticsService) - - if self.options.cycle: - # setup daemon statistics (deprecated names) - self._statService.addStatistic("devices", "GAUGE") - self._statService.addStatistic("dataPoints", "DERIVE") - self._statService.addStatistic("runningTasks", "GAUGE") - self._statService.addStatistic("taskCount", "GAUGE") - self._statService.addStatistic("queuedTasks", "GAUGE") - self._statService.addStatistic("missedRuns", "GAUGE") - - # namespace these a bit so they can be used in ZP monitoring. 
- # prefer these stat names and metrology in future refs - self._dataPointsMetric = Metrology.meter( - "collectordaemon.dataPoints" - ) - daemon = self - - class DeviceGauge(Gauge): - @property - def value(self): - return len(daemon._devices) - - Metrology.gauge("collectordaemon.devices", DeviceGauge()) - - # Scheduler statistics - class RunningTasks(Gauge): - @property - def value(self): - return daemon._scheduler._executor.running - Metrology.gauge("collectordaemon.runningTasks", RunningTasks()) + load_config("collector.zcml", ZENCOLLECTOR_MODULE) - class TaskCount(Gauge): - @property - def value(self): - return daemon._scheduler.taskCount - - Metrology.gauge("collectordaemon.taskCount", TaskCount()) - - class QueuedTasks(Gauge): - @property - def value(self): - return daemon._scheduler._executor.queued - - Metrology.gauge("collectordaemon.queuedTasks", QueuedTasks()) + configFilter = parseWorkerOptions(self.options.__dict__, self.log) + if configFilter: + self.preferences.configFilter = configFilter - class MissedRuns(Gauge): - @property - def value(self): - return daemon._scheduler.missedRuns + dcui = self.options.device_config_update_interval + if dcui: + # Convert minutes to seconds + self._device_config_update_interval = dcui * 60 + else: + # This covers the case where the device_config_update_interval + # value is None, zero, or some other False-like value. + self._device_config_update_interval = 300 - Metrology.gauge("collectordaemon.missedRuns", MissedRuns()) + self._config_update_interval = self._prefs.configCycleInterval * 60 self._deviceGuids = {} - self._devices = set() self._unresponsiveDevices = set() self._rrd = None - self._metric_writer = None - self._derivative_tracker = None self.reconfigureTimeout = None - # keep track of pending tasks if we're doing a single run, and not a + # Keep track of pending tasks if we're doing a single run, and not a # continuous cycle if not self.options.cycle: self._completedTasks = 0 self._pendingTasks = [] - frameworkFactory = zope.component.queryUtility( - IFrameworkFactory, self._frameworkFactoryName - ) - self._configProxy = frameworkFactory.getConfigurationProxy() - self._scheduler = frameworkFactory.getScheduler() + framework = _getFramework(self.frameworkFactoryName) + self._configProxy = framework.getConfigurationProxy() + + self._scheduler = framework.getScheduler() self._scheduler.maxTasks = self.options.maxTasks - self._ConfigurationLoaderTask = ( - frameworkFactory.getConfigurationLoaderTask() - ) - # OLD - set the initialServices attribute so that the PBDaemon class + self._statService = getUtility(IStatisticsService) + if self.options.cycle: + _configure_stats_service(self._statService, self) + + # Set the initialServices attribute so that the PBDaemon class # will load all of the remote services we need. - self.initialServices = PBDaemon.initialServices + [ - self.preferences.configurationService - ] + self.initialServices.append(self.preferences.configurationService) - # trap SIGUSR2 so that we can display detailed statistics + # Trap SIGUSR2 so that we can display detailed statistics signal.signal(signal.SIGUSR2, self._signalHandler) - # let the configuration do any additional startup it might need + # Let the configuration do any additional startup it might need self.preferences.postStartup() - self.addedPostStartupTasks = False # Variables used by enterprise collector in resmgr # # Flag that indicates we have finished loading the configs for the - # first time after a restart. 
+ # first time after a restart self.firstConfigLoadDone = False # Flag that indicates the daemon has received the encryption key - # from zenhub. + # from zenhub self.encryptionKeyInitialized = False - # flag that indicates the daemon is loading the cached configs - self.loadingCachedConfigs = False - def buildOptions(self): - """ - Method called by CmdBase.__init__ to build all of the possible - command-line options for this collector daemon. - """ + # Initialize the object used for retrieving properties, thresholds, + # and other non-device configurations from ZenHub. + self._configloader = ConfigurationLoaderTask(self, self._configProxy) + + # Initialize the object used for retrieving device configurations. + if self.options.device: + self._deviceloader = SingleDeviceConfigLoader( + self.options.device, + self, + self.preferences.configurationService, + self.options, + self._singleDeviceConfigCallback, + ) + else: + self._deviceloader = ManyDeviceConfigLoader( + self._configProxy, self._manyDeviceConfigCallback + ) + + # If cycling is enabled, initialize the tasks that will run + # on an interval. + if self.options.cycle: + self._configcycle = _TaskCycle( + self._configloader, + self._config_update_interval, + self.log, + description="properties, thresholds, etc. retrieval", + now=False + ) + self._devicecycle = _TaskCycle( + self._deviceloader, + self._device_config_update_interval, + self.log, + description="device configuration retrieval", + ) + if self.options.logTaskStats: + self._taskstatscycle = _TaskCycle( + lambda: self._displayStatistics(verbose=True), + self.options.logTaskStats, + self.log, + description="task statistics logging", + now=False, + ) + else: + self._taskstatscycle = None + + # deprecated; kept for vSphere ZP compatibility + self._devices = _DeviceIdProxy(self._deviceloader) + + @property + def preferences(self): # type: (Self) -> ICollectorPreferences + """The preferences object of this daemon.""" + return self._prefs + + @property + def frameworkFactoryName(self): # type: (Self) -> str + return self._frameworkFactoryName + + def buildOptions(self): # type: (Self) -> None super(CollectorDaemon, self).buildOptions() maxTasks = getattr(self.preferences, "maxTasks", None) @@ -342,8 +261,9 @@ def buildOptions(self): dest="logTaskStats", type="int", default=0, - help="How often to logs statistics of current tasks, " - "value in seconds; very verbose", + help="How often to logs statistics of current tasks, value in " + "seconds; very verbose. 
Value of zero disables logging of " + "task statistics.", ) addWorkerOptions(self.parser) self.parser.add_option( @@ -360,82 +280,135 @@ def buildOptions(self): default=None, help="trace metrics whose key value matches this regex", ) - - frameworkFactory = zope.component.queryUtility( - IFrameworkFactory, self._frameworkFactoryName + self.parser.add_option( + "--device-config-update-interval", + type="int", + default=5, + help="The interval, in minutes, that device configs are " + "checked for updates (default %default).", ) - if hasattr(frameworkFactory, "getFrameworkBuildOptions"): - # During upgrades we'll be missing this option - self._frameworkBuildOptions = ( - frameworkFactory.getFrameworkBuildOptions() - ) - if self._frameworkBuildOptions: - self._frameworkBuildOptions(self.parser) + + framework = _getFramework(self.frameworkFactoryName) + buildOpts = framework.getFrameworkBuildOptions() + if buildOpts: + buildOpts(self.parser) # give the collector configuration a chance to add options, too self.preferences.buildOptions(self.parser) - def parseOptions(self): + def parseOptions(self): # type: (Self) -> None + """Overrides base class to process configuration options.""" super(CollectorDaemon, self).parseOptions() self.preferences.options = self.options - configFilter = parseWorkerOptions(self.options.__dict__) - if configFilter: - self.preferences.configFilter = configFilter + # @deprecated + def getInitialServices(self): # type: (Self) -> Sequence[str] + # Retained for compatibility with ZenPacks fixing CollectorDaemon's old + # behavior regarding the `initialServices` attribute. This new + # CollectorDaemon respects changes made to the `initialServices` + # attribute by subclasses, so the reason for overriding this method + # is no longer valid. However, for this method must continue to exist + # to avoid AttributeError exceptions. + return self.initialServices - def connected(self): + def watchdogCycleTime(self): # type: (Self) -> float """ - Method called by PBDaemon after a connection to ZenHub is established. + Return our cycle time (in minutes) + + :return: cycle time + :rtype: integer """ - return self._startup() + return self.preferences.cycleInterval * 2 - def _getInitializationCallback(self): - def doNothing(): - pass + @defer.inlineCallbacks + def connected(self): # type: (Self) -> Deferred + """Invoked after a connection to ZenHub is established.""" + try: + yield defer.maybeDeferred(self._getInitializationCallback()) + framework = _getFramework(self.frameworkFactoryName) + self.log.debug("using framework factory %r", framework) + yield self._initEncryptionKey() + + # Initial configuration load + yield self._configloader() + + # Add "post startup" tasks provided by preferences + self._add_poststartuptasks() + + if self.options.cycle: + self._configcycle.start() + self._startMaintenance() + self._devicecycle.start() + if self._taskstatscycle is not None: + self._taskstatscycle.start() + else: + # Since we're going to run once, load the device config(s) now. 
+ yield self._deviceloader() + except Exception as ex: + self.log.critical("unrecoverable error: %s", ex) + self.log.exception("failed during startup") + self.stop() + def _getInitializationCallback(self): if self._initializationCallback is not None: return self._initializationCallback - else: - return doNothing - - def connectTimeout(self): - super(CollectorDaemon, self).connectTimeout() - return self._startup() - - def _startup(self): - d = defer.maybeDeferred(self._getInitializationCallback()) - d.addCallback(self._initEncryptionKey) - d.addCallback(self._startConfigCycle) - d.addCallback(self._startMaintenance) - d.addErrback(self._errorStop) - return d + return lambda: None @defer.inlineCallbacks - def _initEncryptionKey(self, prv_cb_result=None): - # encrypt dummy msg in order to initialize the encryption key - data = yield self._configProxy.encrypt( - "Hello" - ) # block until we get the key - if data: # encrypt returns None if an exception is raised + def _initEncryptionKey(self): # type: (Self) -> Deferred + # Encrypt dummy msg in order to initialize the encryption key. + # The 'yield' does not return until the key is initialized. + data = yield self._configProxy.encrypt("Hello") + if data: # Encrypt returns None if an exception is raised self.encryptionKeyInitialized = True - self.log.info("Daemon's encryption key initialized") + self.log.debug("initialized encryption key") - def watchdogCycleTime(self): - """ - Return our cycle time (in minutes) + def _add_poststartuptasks(self): + post_startup_tasks = getattr( + self.preferences, "postStartupTasks", lambda: [] + ) + for task_ in post_startup_tasks(): + self._scheduler.addTask(task_, now=True) - @return: cycle time - @rtype: integer - """ - return self.preferences.cycleInterval * 2 + def _reschedule_configcycle( + self, observable, attrName, oldValue, newValue, **kwargs + ): + if not self.options.cycle: + return + if oldValue == newValue: + return + self.log.info( + "changed configuration loader task interval from %s to %s minutes", + oldValue, + newValue, + ) + self._configcycle.interval = newValue * 60 - def getRemoteConfigServiceProxy(self): - """ - Called to retrieve the remote configuration service proxy object. - """ - return self.services.get( - self.preferences.configurationService, FakeRemote() + def _startMaintenance(self): + interval = self.preferences.cycleInterval + + if self.worker_id == 0: + heartbeatSender = ZenHubHeartbeatSender( + self.options.monitor, + self.name, + self.options.heartbeatTimeout, + ) + else: + heartbeatSender = None + self._maintenanceCycle = MaintenanceCycle( + interval, heartbeatSender, self._maintenanceCallback ) + self._maintenanceCycle.start() + + @defer.inlineCallbacks + def getRemoteConfigCacheProxy(self): + """Return the remote configuration cache proxy.""" + proxy = yield self.getService(self._cacheServiceName) + defer.returnValue(proxy) + + def getRemoteConfigServiceProxy(self): + """Return the remote configuration service proxy object.""" + return self.getServiceNow(self.preferences.configurationService) def generateEvent(self, event, **kw): eventCopy = super(CollectorDaemon, self).generateEvent(event, **kw) @@ -450,7 +423,9 @@ def should_trace_metric(self, metric, contextkey): """ Tracer implementation - use this function to indicate whether a given metric/contextkey combination is to be traced. 
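The tracer contract described in the docstring above amounts to: trace a sample only when at least one pattern is configured and every configured pattern matches. A standalone sketch of that logic, built around the same traceMetricName/traceMetricKey option pair; the helper below is illustrative, not the daemon's own method:

    import re

    def should_trace(metric, contextkey, name_pattern=None, key_pattern=None):
        # Collect a (pattern, subject) pair for each tracer option that is set.
        tests = []
        if name_pattern:
            tests.append((name_pattern, metric))
        if key_pattern:
            tests.append((key_pattern, contextkey))
        # Trace only if something is configured and all patterns match.
        return bool(tests) and all(
            re.search(pattern, subject) for pattern, subject in tests
        )

    # should_trace("laLoadInt5_laLoadInt5", "Devices/router1",
    #              name_pattern="laLoad.*")  -> True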
+ :param metric: name of the metric in question + :type metric: str :param contextkey: context key of the metric in question :return: boolean indicating whether to trace this metric/key """ @@ -459,9 +434,7 @@ def should_trace_metric(self, metric, contextkey): tests.append((self.options.traceMetricName, metric)) if self.options.traceMetricKey: tests.append((self.options.traceMetricKey, contextkey)) - result = [bool(re.search(exp, subj)) for exp, subj in tests] - return len(result) > 0 and all(result) @defer.inlineCallbacks @@ -475,33 +448,33 @@ def writeMetric( timestamp="N", min="U", max="U", - threshEventData={}, + threshEventData=None, deviceId=None, contextUUID=None, deviceUUID=None, ): - """ Writes the metric to the metric publisher. - @param contextKey: This is who the metric applies to. This is usually - the return value of rrdPath() for a component or - device. - @param metric: the name of the metric, we expect it to be of the form - datasource_datapoint - @param value: the value of the metric - @param metricType: type of the metric (e.g. 'COUNTER', 'GAUGE', + + :param contextKey: This is who the metric applies to. This is usually + the return value of rrdPath() for a component or device. + :param metric: the name of the metric, we expect it to be of the form + datasource_datapoint. + :param value: the value of the metric. + :param metricType: type of the metric (e.g. 'COUNTER', 'GAUGE', 'DERIVE' etc) - @param contextId: used for the threshold events, the id of who - this metric is for. - @param timestamp: defaults to time.time() if not specified, + :param contextId: used for the threshold events, the id of who this + metric is for. + :param timestamp: defaults to time.time() if not specified, the time the metric occurred. - @param min: used in the derive the min value for the metric - @param max: used in the derive the max value for the metric - @param threshEventData: extra data put into threshold events - @param deviceId: the id of the device for this metric - @return: a deferred that fires when the metric gets published + :param min: used in the derive the min value for the metric. + :param max: used in the derive the max value for the metric. + :param threshEventData: extra data put into threshold events. + :param deviceId: the id of the device for this metric. + :return: a deferred that fires when the metric gets published. 
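Switching threshEventData from a {} default to None (and normalizing inside the body, as the next hunk shows) removes the shared-mutable-default pitfall: a default dict is created once, at function definition time, and then reused by every call. A minimal illustration with hypothetical names:

    def buggy(event_data={}):
        # One dict object is shared by every call that relies on the default,
        # so mutations leak between unrelated callers.
        event_data["count"] = event_data.get("count", 0) + 1
        return event_data

    def safe(event_data=None):
        # A fresh dict per call unless the caller supplies one.
        event_data = event_data if event_data else {}
        event_data["count"] = event_data.get("count", 0) + 1
        return event_data

    buggy()  # {'count': 1}
    buggy()  # {'count': 2}  <- state carried over between calls
    safe()   # {'count': 1}
    safe()   # {'count': 1}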
""" timestamp = int(time.time()) if timestamp == "N" else timestamp + threshEventData = threshEventData if threshEventData else {} tags = {"contextUUID": contextUUID, "key": contextKey} if self.should_trace_metric(metric, contextKey): tags["mtrace"] = "{}".format(int(time.time())) @@ -517,7 +490,7 @@ def writeMetric( min = 0 dkey = "%s:%s" % (contextUUID, metric) - value = self._derivative_tracker.derivative( + value = self.derivativeTracker().derivative( dkey, (float(value), timestamp), min, max ) @@ -526,14 +499,14 @@ def writeMetric( # write the metric to Redis try: yield defer.maybeDeferred( - self._metric_writer.write_metric, + self.metricWriter().write_metric, metric_name, value, timestamp, tags, ) except Exception as e: - self.log.debug("Error sending metric %s", e) + self.log.debug("error sending metric %s", e) yield defer.maybeDeferred( self._threshold_notifier.notify, contextUUID, @@ -552,10 +525,9 @@ def writeMetricWithMetadata( timestamp="N", min="U", max="U", - threshEventData={}, + threshEventData=None, metadata=None, ): - metadata = metadata or {} try: key = metadata["contextKey"] @@ -594,15 +566,13 @@ def writeRRD( cycleTime=None, min="U", max="U", - threshEventData={}, + threshEventData=None, timestamp="N", allowStaleDatapoint=True, ): - """ - Use writeMetric - """ - # We rely on the fact that rrdPath now returns more information - # than just the path. + """Use writeMetric instead.""" + # We rely on the fact that rrdPath now returns more information than + # just the path metricinfo, metric = path.rsplit("/", 1) if "METRIC_DATA" not in str(metricinfo): raise Exception( @@ -628,138 +598,118 @@ def stop(self, ignored=""): try: self._stoppingCallback() except Exception: - self.log.exception("Exception while stopping daemon") + self.log.exception("exception while stopping daemon") super(CollectorDaemon, self).stop(ignored) - def remote_deleteDevice(self, devId): - """ - Called remotely by ZenHub when a device we're monitoring is deleted. - """ - # guard against parsing updates during a disconnect - if devId is None: - return - self._deleteDevice(devId) + def _taskCompleteCallback(self, taskName): + # if we're not running a normal daemon cycle then we need to shutdown + # once all of our pending tasks have completed + if not self.options.cycle: + try: + self._pendingTasks.remove(taskName) + except ValueError: + pass - def remote_deleteDevices(self, deviceIds): - """ - Called remotely by ZenHub when devices we're monitoring are deleted. - """ - # guard against parsing updates during a disconnect - if deviceIds is None: - return - for devId in Zipper.load(deviceIds): - self._deleteDevice(devId) + self._completedTasks += 1 - def remote_updateDeviceConfig(self, config): - """ - Called remotely by ZenHub when asynchronous configuration - updates occur. - """ - # guard against parsing updates during a disconnect - if config is None: - return - self.log.debug("Device %s updated", config.configId) - if self._updateConfig(config): - self._configProxy.updateConfigProxy(self.preferences, config) - else: - self.log.debug("Device %s config filtered", config.configId) + # if all pending tasks have been completed then shutdown the daemon + if len(self._pendingTasks) == 0: + self.log.info( + "completed collection tasks count=%s", + self._completedTasks, + ) + self.stop() - def remote_updateDeviceConfigs(self, configs): - """ - Called remotely by ZenHub when asynchronous configuration - updates occur. 
+ def _singleDeviceConfigCallback(self, config): + # type: (Self, DeviceProxy) -> None """ - if configs is None: - return - configs = Zipper.load(configs) - self.log.debug( - "remote_updateDeviceConfigs: workerid %s processing " - "%s device configs", - self.options.workerid, - len(configs), - ) - for config in configs: - self.remote_updateDeviceConfig(config) + Update the device config for the device this collector manages + when a device is specified on the command line. - def remote_notifyConfigChanged(self): - """ - Called from zenhub to notify that the entire config should be updated + :param new: a list of new device configurations + :type new: Sequence[DeviceProxy] + :param updated: a list of updated device configurations + :type updated: Sequence[DeviceProxy] + :param removed: ignored + :type removed: Sequence[str] """ - if self.reconfigureTimeout and self.reconfigureTimeout.active(): - # We will run along with the already scheduled task - self.log.debug("notifyConfigChanged - using existing call") + if not config: + self.log.error( + "configuration for %s unavailable -- " + "is that the correct name?", + self.options.device, + ) + self.stop() return - self.log.debug("notifyConfigChanged - scheduling call in 30 seconds") - self.reconfigureTimeout = reactor.callLater(30, self._rebuildConfig) + guid = config.deviceGuid + if guid is not None: + self._deviceGuids[config.configId] = guid - def _rebuildConfig(self): - """ - Delete and re-add the configuration tasks to completely re-build - the configuration. - """ - if self.reconfigureTimeout and not self.reconfigureTimeout.active(): - self.reconfigureTimeout = None - self._scheduler.removeTasksForConfig(CONFIG_LOADER_NAME) - self._startConfigCycle() + self._updateConfig(config) - def _rescheduleConfig( - self, observable, attrName, oldValue, newValue, **kwargs - ): + def _manyDeviceConfigCallback(self, new, updated, removed): + # type: ( + # Self, + # Sequence[DeviceProxy], + # Sequence[DeviceProxy], + # Sequence[str] + # ) -> None """ - Delete and re-add the configuration tasks to start on new interval. + Update the device configs for the devices this collector manages + when no device is specified on the command line. 
+ + :param new: a list of new device configurations + :type new: Sequence[DeviceProxy] + :param updated: a list of updated device configurations + :type updated: Sequence[DeviceProxy] + :param removed: a list of devices removed from this collector + :type removed: Sequence[str] """ - if oldValue != newValue: - self.log.debug( - "Changing config task interval from %s to %s minutes", - oldValue, - newValue, - ) - self._scheduler.removeTasksForConfig(CONFIG_LOADER_NAME) - # values are in minutes, scheduler takes seconds - self._startConfigCycle(startDelay=newValue * 60) + for deviceId in removed: + self._deleteDevice(deviceId) - def _taskCompleteCallback(self, taskName): - # if we're not running a normal daemon cycle then we need to shutdown - # once all of our pending tasks have completed - if not self.options.cycle: - try: - self._pendingTasks.remove(taskName) - except ValueError: - pass + for cfg in itertools.chain(new, updated): + # guard against parsing updates during a disconnect + if cfg is None: + continue - self._completedTasks += 1 + guid = cfg.deviceGuid + if guid is not None: + self._deviceGuids[cfg.configId] = guid - # if all pending tasks have been completed then shutdown the daemon - if len(self._pendingTasks) == 0: - self._displayStatistics() - self.stop() + self._updateConfig(cfg) + + sizes = _DeviceConfigSizes(new, updated, removed) + logmethod = self.log.debug if not sizes else self.log.info + logmethod( + "processed %d new, %d updated, and %d removed device configs", + sizes.new, + sizes.updated, + sizes.removed, + ) + + def _deleteDevice(self, deviceId): + self._configListener.deleted(deviceId) + self._scheduler.removeTasksForConfig(deviceId) + self._deviceGuids.pop(deviceId, None) + self.log.info("removed device config device-id=%s", deviceId) def _updateConfig(self, cfg): """ Update device configuration. - Returns true if config is updated, false if config is skipped + Returns True if the configuration was processed, otherwise, + False is returned. 
""" - - # guard against parsing updates during a disconnect - if cfg is None: - return False - configFilter = getattr(self.preferences, "configFilter", None) or ( - lambda x: True - ) - if not ( - (not self.options.device and configFilter(cfg)) - or self.options.device in (cfg.id, cfg.configId) - ): - self.log.info("Device %s config filtered", cfg.configId) + if self._is_config_excluded(cfg): return False configId = cfg.configId - self.log.debug("Processing configuration for %s", configId) + self.log.debug("processing device config config-id=%s", configId) nextExpectedRuns = {} - if configId in self._devices: + if configId in self._deviceloader.deviceIds: tasksToRemove = self._scheduler.getTasksForConfig(configId) nextExpectedRuns = { taskToRemove.name: self._scheduler.getNextExpectedRun( @@ -767,17 +717,20 @@ def _updateConfig(self, cfg): ) for taskToRemove in tasksToRemove } - self._scheduler.removeTasks(task.name for task in tasksToRemove) + self._scheduler.removeTasks( + tuple(task.name for task in tasksToRemove) + ) self._configListener.updated(cfg) else: - self._devices.add(configId) self._configListener.added(cfg) + self._update_thresholds(configId, cfg) + newTasks = self._taskSplitter.splitConfiguration([cfg]) - self.log.debug("Tasks for config %s: %s", configId, newTasks) + self.log.debug("tasks for config %s: %s", configId, newTasks) nowTime = time.time() - for (taskName, task_) in newTasks.iteritems(): + for taskName, task_ in newTasks.iteritems(): # if not cycling run the task immediately, # otherwise let the scheduler decide when to run the task now = not self.options.cycle @@ -794,169 +747,84 @@ def _updateConfig(self, cfg): try: self._scheduler.addTask(task_, self._taskCompleteCallback, now) except ValueError: - self.log.exception("Error adding device config") + self.log.exception( + "failed to schedule task name=%s config-id=%s", + task_.name, + task_.configId, + ) continue - # TODO: another hack? - if hasattr(cfg, "thresholds"): - self.getThresholds().updateForDevice(configId, cfg.thresholds) - # if we're not running a normal daemon cycle then keep track of the # tasks we just added for this device so that we can shutdown once # all pending tasks have completed if not self.options.cycle: self._pendingTasks.append(taskName) + # put tasks on pause after configuration update to prevent # unnecessary collections ZEN-25463 if configId in self._unresponsiveDevices: - self.log.debug("Pausing tasks for device %s", configId) + self.log.debug("pausing tasks for device %s", configId) self._scheduler.pauseTasksForConfig(configId) + self.log.info("processed device config config-id=%s", configId) return True - @defer.inlineCallbacks - def _updateDeviceConfigs(self, updatedConfigs, purgeOmitted): - """ - Update the device configurations for the devices managed by this - collector. - @param deviceConfigs a list of device configurations - @type deviceConfigs list of name,value tuples - """ - self.log.debug( - "updateDeviceConfigs: updatedConfigs=%s", - map(str, updatedConfigs), - ) - - for cfg in updatedConfigs: - self._updateConfig(cfg) - # yield time to reactor so other things can happen - yield task.deferLater(reactor, 0, lambda: None) - - if purgeOmitted: - self._purgeOmittedDevices(cfg.configId for cfg in updatedConfigs) - - def _purgeOmittedDevices(self, updatedDevices): - """ - Delete all current devices that are omitted from the list of - devices being updated. 
- - @param updatedDevices a collection of device ids - @type updatedDevices a sequence of strings - """ - # remove tasks for the deleted devices - deletedDevices = set(self._devices) - set(updatedDevices) - self.log.debug( - "purgeOmittedDevices: deletedConfigs=%s", ",".join(deletedDevices) - ) - for configId in deletedDevices: - self._deleteDevice(configId) - - def _deleteDevice(self, deviceId): - self.log.debug("Device %s deleted", deviceId) - - self._devices.discard(deviceId) - self._configListener.deleted(deviceId) - self._configProxy.deleteConfigProxy(self.preferences, deviceId) - self._scheduler.removeTasksForConfig(deviceId) - - def _errorStop(self, result): - """ - Twisted callback to receive fatal messages. - - @param result: the Twisted failure - @type result: failure object - """ - if isinstance(result, Failure): - msg = result.getErrorMessage() - else: - msg = str(result) - self.log.critical("Unrecoverable Error: %s", msg) - self.stop() + def _update_thresholds(self, configId, cfg): + thresholds = getattr(cfg, "thresholds", None) + if thresholds: + try: + self.getThresholds().updateForDevice(configId, thresholds) + except Exception: + self.log.exception( + "failed to update thresholds config-id=%s thresholds=%r", + configId, + thresholds, + ) - def _startConfigCycle(self, result=None, startDelay=0): - configLoader = self._ConfigurationLoaderTask( - CONFIG_LOADER_NAME, taskConfig=self.preferences - ) - configLoader.startDelay = startDelay - # Don't add the config loader task if the scheduler already has - # an instance of it. - if configLoader not in self._scheduler: - # Run initial maintenance cycle as soon as possible - # TODO: should we not run maintenance if running in non-cycle mode? - self._scheduler.addTask(configLoader) - else: - self.log.info("%s already added to scheduler", configLoader.name) - return defer.succeed("Configuration loader task started") + def _is_config_excluded(self, cfg): + configFilter = getattr(self.preferences, "configFilter", _always_ok) + if not ( + (not self.options.device and configFilter(cfg)) + or self.options.device in (cfg.id, cfg.configId) + ): + self.log.info( + "filtered out device config config-id=%s", cfg.configId + ) + return True + return False def setPropertyItems(self, items): - """ - Override so that preferences are updated - """ + """Override so that preferences are updated.""" super(CollectorDaemon, self).setPropertyItems(items) self._setCollectorPreferences(dict(items)) def _setCollectorPreferences(self, preferenceItems): for name, value in preferenceItems.iteritems(): if not hasattr(self.preferences, name): - # TODO: make a super-low level debug mode? 
- # The following message isn't helpful - # self.log.debug( - # "Preferences object does not have attribute %s", name - # ) setattr(self.preferences, name, value) elif getattr(self.preferences, name) != value: - self.log.debug("Updated %s preference to %s", name, value) + self.log.debug("updated %s preference to %s", name, value) setattr(self.preferences, name, value) - def _loadThresholdClasses(self, thresholdClasses): - self.log.debug("Loading classes %s", thresholdClasses) - for c in thresholdClasses: - try: - importClass(c) - except ImportError: - log.exception("Unable to import class %s", c) - def _configureThresholds(self, thresholds): self.getThresholds().updateList(thresholds) - def _startMaintenance(self, ignored=None): - unused(ignored) - if not self.options.cycle: - self._maintenanceCycle() - return - if self.options.logTaskStats > 0: - log.debug("Starting Task Stat logging") - loop = task.LoopingCall(self._displayStatistics, verbose=True) - loop.start(self.options.logTaskStats, now=False) - - interval = self.preferences.cycleInterval - self.log.debug("Initializing maintenance Cycle") - heartbeatSender = self if self.worker_id == 0 else None - maintenanceCycle = MaintenanceCycle( - interval, heartbeatSender, self._maintenanceCycle - ) - maintenanceCycle.start() - @defer.inlineCallbacks - def _maintenanceCycle(self, ignored=None): + def _maintenanceCallback(self, ignored=None): """ - Perform daemon maintenance processing on a periodic schedule. Initially - called after the daemon configuration loader task is added, + Perform daemon maintenance processing on a periodic schedule. + + Initially called after the daemon configuration loader task is added, but afterward will self-schedule each run. """ try: - self.log.debug("Performing periodic maintenance") - if not self.options.cycle: - ret = "No maintenance required" - elif getattr(self.preferences, "pauseUnreachableDevices", True): + if self.options.cycle and getattr( + self.preferences, "pauseUnreachableDevices", True + ): # TODO: handle different types of device issues - ret = yield self._pauseUnreachableDevices() - else: - ret = None - defer.returnValue(ret) + yield self._pauseUnreachableDevices() except Exception: - self.log.exception("failure in _maintenanceCycle") - raise + self.log.exception("failure while running maintenance callback") @defer.inlineCallbacks def _pauseUnreachableDevices(self): @@ -967,38 +835,26 @@ def _pauseUnreachableDevices(self): # Device ping issues returns as a tuple of (deviceId, count, total) # and we just want the device id - newUnresponsiveDevices = set(i[0] for i in issues) + newUnresponsiveDevices = {i[0] for i in issues} clearedDevices = self._unresponsiveDevices.difference( newUnresponsiveDevices ) for devId in clearedDevices: - self.log.debug("Resuming tasks for device %s", devId) + self.log.debug("resuming tasks for device %s", devId) self._scheduler.resumeTasksForConfig(devId) self._unresponsiveDevices = newUnresponsiveDevices for devId in self._unresponsiveDevices: - self.log.debug("Pausing tasks for device %s", devId) + self.log.debug("pausing tasks for device %s", devId) self._scheduler.pauseTasksForConfig(devId) defer.returnValue(issues) - def runPostConfigTasks(self, result=None): + def runPostConfigTasks(self): """ Add post-startup tasks from the preferences. - - This may be called with the failure code as well. 
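_pauseUnreachableDevices is a set-difference exercise: devices that left the unresponsive set since the last check are resumed, and everything currently unresponsive is (re)paused. The set arithmetic in isolation:

    def reconcile_unresponsive(previous, issues, pause, resume):
        """`previous` is the set paused last cycle; `issues` is an iterable
        of (deviceId, count, total) tuples; pause/resume take a device id."""
        current = {issue[0] for issue in issues}
        for dev_id in previous - current:   # recovered since the last check
            resume(dev_id)
        for dev_id in current:              # still, or newly, unreachable
            pause(dev_id)
        return current                      # becomes `previous` next time

    # previous = reconcile_unresponsive(
    #     previous, issues,
    #     scheduler.pauseTasksForConfig, scheduler.resumeTasksForConfig)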
""" - if isinstance(result, Failure): - pass - - elif not self.addedPostStartupTasks: - postStartupTasks = getattr( - self.preferences, "postStartupTasks", lambda: [] - ) - for _task in postStartupTasks(): - self._scheduler.addTask(_task, now=True) - self.addedPostStartupTasks = True def postStatisticsImpl(self): self._displayStatistics() @@ -1006,7 +862,7 @@ def postStatisticsImpl(self): # update and post statistics if we've been configured to do so if self.rrdStats: stat = self._statService.getStatistic("devices") - stat.value = len(self._devices) + stat.value = len(self._deviceloader.deviceIds) # stat = self._statService.getStatistic("cyclePoints") # stat.value = self._rrd.endCycle() @@ -1036,14 +892,15 @@ def postStatisticsImpl(self): def _displayStatistics(self, verbose=False): if self.metricWriter(): - self.log.info( - "%d devices processed (%d datapoints)", - len(self._devices), + self.log.debug( + "%d devices processed (%d samples)", + len(self._deviceloader.deviceIds), self.metricWriter().dataPoints, ) else: - self.log.info( - "%d devices processed (0 datapoints)", len(self._devices) + self.log.debug( + "%d devices processed (0 samples)", + len(self._deviceloader.deviceIds), ) self._scheduler.displayStatistics(verbose) @@ -1053,65 +910,130 @@ def _signalHandler(self, signum, frame): @property def worker_count(self): - """ - worker_count for this daemon - """ + """The count of service instances.""" return getattr(self.options, "workers", 1) @property def worker_id(self): - """ - worker_id for this particular peer - """ + """The ID of this particular service instance.""" return getattr(self.options, "workerid", 0) -@zope.interface.implementer(IStatistic) -class Statistic(object): - def __init__(self, name, type, **kwargs): - self.value = 0 - self.name = name - self.type = type - self.kwargs = kwargs +def _verify_input_args(prefs, tasksplitter, configlistener): + if not ICollectorPreferences.providedBy(prefs): + raise TypeError("configuration must provide ICollectorPreferences") + if not ITaskSplitter.providedBy(tasksplitter): + raise TypeError("taskSplitter must provide ITaskSplitter") + if configlistener is not None: + if not IConfigurationListener.providedBy(configlistener): + raise TypeError( + "configurationListener must provide IConfigurationListener" + ) -@zope.interface.implementer(IStatisticsService) -class StatisticsService(object): - def __init__(self): - self._stats = {} +@attr.s(frozen=True, slots=True) +class _DeviceConfigSizes(object): + new = attr.ib(converter=len) + updated = attr.ib(converter=len) + removed = attr.ib(converter=len) - def addStatistic(self, name, type, **kwargs): - if name in self._stats: - raise NameError("Statistic %s already exists" % name) + def __nonzero__(self): + return (self.new, self.updated, self.removed) != (0, 0, 0) - if type not in ("DERIVE", "COUNTER", "GAUGE"): - raise TypeError("Statistic type %s not supported" % type) - stat = Statistic(name, type, **kwargs) - self._stats[name] = stat +class _DeviceIdProxy(object): + """ + Exists to maintain an API for ZenPacks that accessed CollectorDaemon's + _devices attribute. 
+ """ - def getStatistic(self, name): - return self._stats[name] + def __init__(self, loader): + self.__loader = loader - def postStatistics(self, rrdStats): - for stat in self._stats.values(): - # figure out which function to use to post this statistical data - try: - func = { - "COUNTER": rrdStats.counter, - "GAUGE": rrdStats.gauge, - "DERIVE": rrdStats.derive, - }[stat.type] - except KeyError: - raise TypeError("Statistic type %s not supported" % stat.type) + def __contains__(self, deviceId): + return deviceId in self.__loader.deviceIds - # These should always come back empty now because DaemonStats - # posts the events for us - func(stat.name, stat.value, **stat.kwargs) + def add(self, deviceId): + pass - # counter is an ever-increasing value, but otherwise... - if stat.type != "COUNTER": - stat.value = 0 + def discard(self, deviceId): + pass + + +class _TaskCycle(object): + """ + Invoke a callable object at a regular interval. + """ + + def __init__(self, func, interval, log, description=None, now=True): + self._log = log + self._func = func + self._interval = interval + self._now = now + if description: + self._desc = description + elif hasattr(func, "im_func"): + self._desc = func.im_func.func_name + else: + self._desc = func.__class__.__name__ + self._loop = None + self._loopd = None + self._triggerid = None + + @property + def interval(self): + return self._interval + + @interval.setter + def interval(self, value): + if value == self._interval: + return + self._interval = value + self._reschedule() + + def start(self): + if self._loop is not None: + return + self._loop = task.LoopingCall(self._func) + self._loopd = self._loop.start(self._interval, now=self._now) + self._triggerid = reactor.addSystemEventTrigger( + "before", "shutdown", self.stop + ) + self._log.info( + "started %s task interval=%d now=%s", + self._desc, + self._interval, + self._now, + ) + self._loopd.addCallback(self._logstopped) + self._loopd.addErrback(self._logerror) + + def stop(self): + if self._loop is None: + return + self._loop.stop() + self._loop = self._loopd = None + + def _logstopped(self, *args, **kw): + self._log.info("stopped %s task", self._desc) + + def _logerror(self, result): + self._log.error( + "task did not run func=%s error=%s", self._func, result + ) + + def _reschedule(self): + if self._loop is None: + # cycle is not running, so nothing to reschedule + return + self.stop() + reactor.removeSystemEventTrigger(self._triggerid) + self._triggerid = None + self.start() + + +def _always_ok(*args): + return True def addWorkerOptions(parser): @@ -1128,13 +1050,67 @@ def addWorkerOptions(parser): parser.add_option("--workers", type="int", default=1, help=SUPPRESS_HELP) -def parseWorkerOptions(options): +def _getFramework(name): + return queryUtility(IFrameworkFactory, name) + + +def parseWorkerOptions(options, log): dispatchFilterName = options.get("configDispatch", "") if options else "" filterFactories = dict(getUtilitiesFor(IConfigurationDispatchingFilter)) filterFactory = filterFactories.get( dispatchFilterName, None ) or filterFactories.get("", None) if filterFactory: - filter = filterFactory.getFilter(options) - log.debug("Filter configured: %s:%s", filterFactory, filter) - return filter + filt = filterFactory.getFilter(options) + log.debug("configured filter: %s:%s", filterFactory, filt) + return filt + + +def _configure_stats_service(service, daemon): + # setup daemon statistics (deprecated names) + service.addStatistic("devices", "GAUGE") + service.addStatistic("dataPoints", "DERIVE") + 
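_TaskCycle above is a thin wrapper over Twisted's LoopingCall plus a before-shutdown trigger; reassigning its interval property restarts the loop. The underlying pattern on its own, without the wrapper (the polled function and the 300-second interval are placeholders):

    import logging

    from twisted.internet import reactor, task

    log = logging.getLogger("zen.example")

    def poll_configs():
        log.info("checking for configuration updates")

    # Run poll_configs every 300 seconds, firing once immediately.
    loop = task.LoopingCall(poll_configs)
    d = loop.start(300, now=True)
    d.addErrback(lambda failure: log.error("loop died: %s", failure))

    # Stop the loop cleanly at reactor shutdown, as _TaskCycle does.
    reactor.addSystemEventTrigger("before", "shutdown", loop.stop)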
service.addStatistic("runningTasks", "GAUGE") + service.addStatistic("taskCount", "GAUGE") + service.addStatistic("queuedTasks", "GAUGE") + service.addStatistic("missedRuns", "GAUGE") + + # namespace these a bit so they can be used in ZP monitoring. + # prefer these stat names and metrology in future refs + daemon._dataPointsMetric = Metrology.meter("collectordaemon.dataPoints") + + class DeviceGauge(Gauge): + @property + def value(self): + return len(daemon._deviceloader.deviceIds) + + Metrology.gauge("collectordaemon.devices", DeviceGauge()) + + # Scheduler statistics + class RunningTasks(Gauge): + @property + def value(self): + return daemon._scheduler._executor.running + + Metrology.gauge("collectordaemon.runningTasks", RunningTasks()) + + class TaskCount(Gauge): + @property + def value(self): + return daemon._scheduler.taskCount + + Metrology.gauge("collectordaemon.taskCount", TaskCount()) + + class QueuedTasks(Gauge): + @property + def value(self): + return daemon._scheduler._executor.queued + + Metrology.gauge("collectordaemon.queuedTasks", QueuedTasks()) + + class MissedRuns(Gauge): + @property + def value(self): + return daemon._scheduler.missedRuns + + Metrology.gauge("collectordaemon.missedRuns", MissedRuns()) diff --git a/Products/ZenCollector/frameworkfactory.py b/Products/ZenCollector/frameworkfactory.py new file mode 100644 index 0000000000..fddaad3d95 --- /dev/null +++ b/Products/ZenCollector/frameworkfactory.py @@ -0,0 +1,39 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from zope.component import queryUtility +from zope.interface import implementer + +from .config import ConfigurationLoaderTask, ConfigurationProxy +from .interfaces import IFrameworkFactory, ICollectorPreferences +from .scheduler import TaskScheduler + + +@implementer(IFrameworkFactory) +class CoreCollectorFrameworkFactory(object): + def __init__(self): + self.__configProxy = None + self.__scheduler = None + + def getConfigurationProxy(self): + if self.__configProxy is None: + prefs = queryUtility(ICollectorPreferences) + self.__configProxy = ConfigurationProxy(prefs) + return self.__configProxy + + def getScheduler(self): + if self.__scheduler is None: + self.__scheduler = TaskScheduler.make() + return self.__scheduler + + def getConfigurationLoaderTask(self): + return ConfigurationLoaderTask + + def getFrameworkBuildOptions(self): + return None diff --git a/Products/ZenCollector/interfaces.py b/Products/ZenCollector/interfaces.py index e48f06d816..331305e175 100644 --- a/Products/ZenCollector/interfaces.py +++ b/Products/ZenCollector/interfaces.py @@ -9,6 +9,8 @@ import zope.interface +# IEventService imported here for ZenPack compability +from Products.ZenHub.interfaces import IEventService # noqa: F401 from Products.ZenUtils.observable import IObservable @@ -113,43 +115,35 @@ class IConfigurationProxy(zope.interface.Interface): the configuration for a collector. """ - def getPropertyItems(prefs): + def getPropertyItems(): """ Retrieve the collector's property items. 
- @param prefs: the collector preferences object - @type prefs: an object providing ICollectorPreferences @return: properties for this collector @rtype: either a dict or a Deferred """ - def getThresholdClasses(prefs): + def getThresholdClasses(): """ Retrieve the collector's required threshold classes. - @param prefs: the collector preferences object - @type prefs: an object providing ICollectorPreferences @return: the names of all the collector threshold classes to loaded @rtype: an iterable set of strings containing Python class names """ - def getThresholds(prefs): + def getThresholds(): """ Retrieve the collector's threshold definitions. - @param prefs: the collector preferences object - @type prefs: an object providing ICollectorPreferences @return: the threshold definitions @rtype: an iterable set of threshold definitions """ - def getConfigProxies(prefs, ids=[]): + def getConfigProxies(configIds=None): """ Called by the framework whenever the configuration for this collector should be retrieved. - @param prefs: the collector preferences object - @type prefs: an object providing ICollectorPreferences @param configIds: specific config Ids to be configured @type configIds: an iterable @return: a twisted Deferred, optional in case the configure operation @@ -157,24 +151,6 @@ def getConfigProxies(prefs, ids=[]): @rtype: twisted.internet.defer.Deferred """ - def deleteConfigProxy(prefs, configId): - """ - Called by the framework whenever a configuration should be removed. - @param prefs: the collector preferences object - @type prefs: an object providing ICollectorPreferences - @param configId: the identifier to remove - @type: string - """ - - def updateConfigProxy(prefs, config): - """ - Called by the framework whenever the configuration has been updated by - an external event. - @param prefs: the collector preferences object - @type prefs: an object providing ICollectorPreferences - @param config: the updated configuration - """ - class IScheduler(zope.interface.Interface): """ @@ -441,33 +417,93 @@ class IDataService(zope.interface.Interface): """ def writeMetric( - path, + contextKey, metric, value, - timestamp, metricType, - metricId, - min, - max, - hasThresholds, - threshEventData, - allowStaleDatapoint, + contextId, + timestamp="N", + min="U", + max="U", + threshEventData=None, + deviceId=None, + contextUUID=None, + deviceUUID=None, ): """ Write the value provided for the specified metric to Redis - @param path: metric path - @param metric: name of the incoming metric - @param value: value to be writen to Redis - @param metricType: COUNTER, DERIVE, GAUGE, etc. - @param timestamp: when the value was received - @param metricId: unique identifier for the metric - @param min: metric minimum - @param max: metric maximum - @param hasThresholds: boolean indicating presence of thresholds - for this metricId. - @param allowStaleDatapoint: boolean indicating whether stale - values are OK. + @param contextKey: The device or component the metric applies to. + This is typically in the form a path. + @type contextKey: str + @param metric: The name of the metric, we expect it to be of the form + datasource_datapoint. + @type metric: str + @param value: the value of the metric. + @type value: float + @param metricType: type of the metric (e.g. 'COUNTER', 'GAUGE', + 'DERIVE' etc) + @type metricType: str + @param contextId: used for the threshold events, the ID of the device. + @type contextId: str + @param timestamp: defaults to time.time() if not specified, + the time the metric occurred. 
+ @type timestamp: float + @param min: used in the derive the min value for the metric. + @type min: float + @param max: used in the derive the max value for the metric. + @type max: float + @param threshEventData: extra data put into threshold events. + @type threshEventData: dict | None + @param deviceId: the id of the device for this metric. + @type deviceId: str + @param contextUUID: The device/component UUID value + @type contextUUID: str + @param deviceUUID: The device UUID value + @type deviceUUID: str + """ + + def writeMetricWithMetadata( + metric, + value, + metricType, + timestamp="N", + min="U", + max="U", + threshEventData=None, + metadata=None, + ): + """ + Basically wraps the `writeMetric` method. The `metadata` parameter + must contain the following fields: + + contextKey : str + contextId : str + deviceId : str + contextUUID : str + deviceUUID : str + + These fields have the same meaning as in the `writeMetric` method. + + @param metric: The name of the metric, we expect it to be of the form + datasource_datapoint. + @type metric: str + @param value: the value of the metric. + @type value: float + @param metricType: type of the metric (e.g. 'COUNTER', 'GAUGE', + 'DERIVE' etc) + @type metricType: str + @param timestamp: defaults to time.time() if not specified, + the time the metric occurred. + @type timestamp: float + @param min: used in the derive the min value for the metric. + @type min: float + @param max: used in the derive the max value for the metric. + @type max: float + @param threshEventData: extra data put into threshold events. + @type threshEventData: dict + @param metadata: Contains contextual data about the metric. + @type metadata: dict """ def writeRRD( @@ -506,7 +542,7 @@ def writeRRD( @type max: number @param threshEventData: on threshold violation, update the event with this data. - @type threshEventData: dictionary + @type threshEventData: dictionary | None @param allowStaleDatapoint: attempt to write datapoint even if a newer datapoint has already been written. @type allowStaleDatapoint: boolean @@ -515,15 +551,6 @@ def writeRRD( """ -class IEventService(zope.interface.Interface): - """ - A service that allows the sending of an event. - """ - - def sendEvent(event, **kw): - pass - - class IFrameworkFactory(zope.interface.Interface): """ An abstract factory object that allows the collector framework to be @@ -541,15 +568,15 @@ def getScheduler(): Retrieve the framework's implementation of the IScheduler interface. """ - def getConfigurationLoaderTask(): + def getConfigurationLoaderTask(*args, **kw): """ - Retrieve the class definition used by the framework to load - configuration information from zenhub. + Return an instance of the configuration loader task constructed + from the provided arguments. """ - def getFrameworkBuildOptions(): + def getBuildOptions(parser): """ - Retrieve the framework's buildOptions method. + Apply the framework's build options to the given parser object. """ diff --git a/Products/ZenCollector/listeners.py b/Products/ZenCollector/listeners.py new file mode 100644 index 0000000000..5e4d9429ac --- /dev/null +++ b/Products/ZenCollector/listeners.py @@ -0,0 +1,98 @@ +import logging + +from zope.interface import implementer + +from Products.ZenCollector.interfaces import IConfigurationListener + +log = logging.getLogger("zen.daemon.listeners") + + +@implementer(IConfigurationListener) +class DummyListener(object): + """ + No-op implementation of a listener that can be registered with instances + of ConfigListenerNotifier class. 
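writeMetricWithMetadata is a thin wrapper around writeMetric that pulls its context fields out of the metadata dict; the five keys documented above are required. A hedged sketch of assembling such a dict (all values here are placeholders):

    metadata = {
        "contextKey": "Devices/router1/laLoadInt5",  # usually rrdPath() output
        "contextId": "laLoadInt5",
        "deviceId": "router1",
        "contextUUID": "00000000-0000-0000-0000-000000000001",
        "deviceUUID": "00000000-0000-0000-0000-000000000002",
    }
    # dataService.writeMetricWithMetadata(
    #     "laLoadInt5_laLoadInt5", 0.42, "GAUGE", metadata=metadata)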
+ """ + + def deleted(self, configurationId): + log.debug("DummyListener: configuration %s deleted", configurationId) + + def added(self, configuration): + log.debug("DummyListener: configuration %s added", configuration) + + def updated(self, newConfiguration): + log.debug("DummyListener: configuration %s updated", newConfiguration) + + +@implementer(IConfigurationListener) +class ConfigListenerNotifier(object): + """ + Registers other IConfigurationListener objects and notifies them when + this object is notified of configuration removals, adds, and updates. + """ + + _listeners = [] + + def addListener(self, listener): + self._listeners.append(listener) + + def deleted(self, configurationId): + """ + Notify listener when a configuration is deleted. + + :param configurationId: The ID of the deleted configuration. + :type configurationId: str + """ + for listener in self._listeners: + listener.deleted(configurationId) + + def added(self, configuration): + """ + Notify the listeners when a configuration is added. + + :param configuration: The added configuration object. + :type configuration: DeviceProxy + """ + for listener in self._listeners: + listener.added(configuration) + + def updated(self, newConfiguration): + """ + Notify the listeners when a configuration has changed. + + :param newConfiguration: The updated configuration object. + :type newConfiguration: DeviceProxy + """ + for listener in self._listeners: + listener.updated(newConfiguration) + + +@implementer(IConfigurationListener) +class DeviceGuidListener(object): + """ + Manages configuration IDs on the given 'daemon' object, making the + necessary changes when notified of configuration additions, removals, + and updates. + """ + + def __init__(self, daemon): + """ + Initialize a DeviceGuidListener instance. + + :param daemon: The daemon object. + :type daemon: CollectorDaemon + """ + self._daemon = daemon + + def deleted(self, configurationId): + self._daemon._deviceGuids.pop(configurationId, None) + + def added(self, configuration): + deviceGuid = getattr(configuration, "deviceGuid", None) + if deviceGuid: + self._daemon._deviceGuids[configuration.id] = deviceGuid + + def updated(self, newConfiguration): + deviceGuid = getattr(newConfiguration, "deviceGuid", None) + if deviceGuid: + self._daemon._deviceGuids[newConfiguration.id] = deviceGuid diff --git a/Products/ZenCollector/scheduler/__init__.py b/Products/ZenCollector/scheduler/__init__.py new file mode 100644 index 0000000000..3dd71c6b9b --- /dev/null +++ b/Products/ZenCollector/scheduler/__init__.py @@ -0,0 +1,20 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +from .scheduler import Scheduler, TaskScheduler +from .task import CallableTaskFactory, CallableTask + +__all__ = ( + "Scheduler", + "TaskScheduler", + "CallableTaskFactory", + "CallableTask" +) diff --git a/Products/ZenCollector/scheduler.py b/Products/ZenCollector/scheduler/scheduler.py similarity index 54% rename from Products/ZenCollector/scheduler.py rename to Products/ZenCollector/scheduler/scheduler.py index cbd562f61c..1dd5a662ac 100644 --- a/Products/ZenCollector/scheduler.py +++ b/Products/ZenCollector/scheduler/scheduler.py @@ -1,308 +1,38 @@ +############################################################################## # -# -# Copyright (C) Zenoss, Inc. 2009-2017 all rights reserved. +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. # -# - -from __future__ import print_function +############################################################################## -""" -Support for scheduling tasks and running them on a periodic interval. Tasks -are associated directly with a device, but multiple tasks may exist for a -single device or other monitored object. -""" +from __future__ import absolute_import import logging -import math -import os import random -import sys import time -from StringIO import StringIO - -import zope.interface +from collections import Sequence from twisted.internet import defer, reactor, task from twisted.python.failure import Failure +from zope.interface import implementer -from Products.ZenEvents import Event from Products.ZenUtils.Executor import TwistedExecutor -from Products.ZenUtils.keyedset import KeyedSet -from Products.ZenUtils.Utils import dumpCallbacks - -from .cyberark import get_cyberark -from .interfaces import ( - IScheduler, - IScheduledTask, - IPausingScheduledTask, -) -from .tasks import TaskStates - -log = logging.getLogger("zen.collector.scheduler") - - -class StateStatistics(object): - def __init__(self, state): - self.state = state - self.reset() - - def addCall(self, elapsedTime): - self.totalElapsedTime += elapsedTime - self.totalElapsedTimeSquared += elapsedTime**2 - self.totalCalls += 1 - - if self.totalCalls == 1: - self.minElapsedTime = elapsedTime - self.maxElapsedTime = elapsedTime - else: - self.minElapsedTime = min(self.minElapsedTime, elapsedTime) - self.maxElapsedTime = max(self.maxElapsedTime, elapsedTime) - - def reset(self): - self.totalElapsedTime = 0.0 - self.totalElapsedTimeSquared = 0.0 - self.totalCalls = 0 - self.minElapsedTime = 0xFFFFFFFF - self.maxElapsedTime = 0 - - @property - def mean(self): - return float(self.totalElapsedTime) / float(self.totalCalls) - - @property - def stddev(self): - if self.totalCalls == 1: - return 0 - else: - # see http://www.dspguide.com/ch2/2.htm for stddev of running stats - return math.sqrt( - ( - self.totalElapsedTimeSquared - - self.totalElapsedTime**2 / self.totalCalls - ) - / (self.totalCalls - 1) - ) - - -class TaskStatistics(object): - def __init__(self, task): - self.task = task - self.totalRuns = 0 - self.failedRuns = 0 - self.missedRuns = 0 - self.states = {} - self.stateStartTime = None - - def trackStateChange(self, oldState, newState): - now = time.time() - - # record how long we spent in the previous state, if there was one - if oldState is not None and self.stateStartTime: - # TODO: how do we 
properly handle clockdrift or when the clock - # changes, or is time.time() independent of that? - elapsedTime = now - self.stateStartTime - - if oldState in self.states: - stats = self.states[oldState] - else: - stats = StateStatistics(oldState) - self.states[oldState] = stats - stats.addCall(elapsedTime) - - self.stateStartTime = now - - -class CallableTask(object): - """ - A CallableTask wraps an object providing IScheduledTask so that it can be - treated as a callable object. This allows the scheduler to make use of the - Twisted framework's LoopingCall construct for simple interval-based - scheduling. - """ - - def __init__(self, task, scheduler, executor): - if not IScheduledTask.providedBy(task): - raise TypeError("task must provide IScheduledTask") - else: - self.task = task - - self.task._scheduler = scheduler - self._scheduler = scheduler - self._executor = executor - self.paused = False - self.taskStats = None - - def __repr__(self): - return "CallableTask: %s" % getattr(self.task, "name", self.task) - - def running(self): - """ - Called whenever this task is being run. - """ - try: - if hasattr(self.task, "missed"): - self.task._eventService.sendEvent( - { - "eventClass": "/Perf/MissedRuns", - "component": os.path.basename(sys.argv[0]).replace( - ".py", "" - ), - }, - device=self.task._devId, - summary="Task `{}` is being run.".format(self.task.name), - severity=Event.Clear, - eventKey=self.task.name, - ) - del self.task.missed - except Exception: - pass - self.taskStats.totalRuns += 1 - - def logTwistedTraceback(self, reason): - """ - Twisted errBack to record a traceback and log messages - """ - out = StringIO() - reason.printTraceback(out) - # This shouldn't be necessary except for dev code - log.debug(out.getvalue()) - out.close() - def finished(self, result): - """ - Called whenever this task has finished. - """ - if isinstance(result, Failure): - self.taskStats.failedRuns += 1 - self.logTwistedTraceback(result) - - def late(self): - """ - Called whenever this task is late and missed its scheduled run time. - """ - try: - # some tasks we don't want to consider a missed run. - if getattr(self.task, "suppress_late", False): - return - - # send event only for missed runs on devices. - self.task._eventService.sendEvent( - { - "eventClass": "/Perf/MissedRuns", - "component": os.path.basename(sys.argv[0]).replace( - ".py", "" - ), - }, - device=self.task._devId, - summary="Missed run: {}".format(self.task.name), - message=self._scheduler._displayStateStatistics( - "", self.taskStats.states - ), - severity=Event.Warning, - eventKey=self.task.name, - ) - self.task.missed = True - except Exception: - pass - self.taskStats.missedRuns += 1 - - def __call__(self): - if self.task.state is TaskStates.STATE_PAUSED and not self.paused: - self.task.state = TaskStates.STATE_IDLE - elif self.paused and self.task.state is not TaskStates.STATE_PAUSED: - self.task.state = TaskStates.STATE_PAUSED - - self._scheduler.setNextExpectedRun(self.task.name, self.task.interval) - - if self.task.state in [TaskStates.STATE_IDLE, TaskStates.STATE_PAUSED]: - if not self.paused: - self.task.state = TaskStates.STATE_QUEUED - # don't return deferred to looping call. - # If a deferred is returned to looping call - # it won't reschedule on error and will only - # reschedule after the deferred is done. 
This method - # should be called regardless of whether or - # not the task is still running to keep track - # of "late" tasks - d = self._executor.submit(self._doCall) - - def _callError(failure): - msg = "%s - %s failed %s" % ( - self.task, - self.task.name, - failure, - ) - log.debug(msg) - # don't return failure to prevent - # "Unhandled error in Deferred" message - return msg - - # l last error handler in the chain - d.addErrback(_callError) - else: - self._late() - # don't return a Deferred because we want LoopingCall to keep - # rescheduling so that we can keep track of late intervals - - def _doCall(self): - d = defer.maybeDeferred(self._run) - d.addBoth(self._finished) - - # dump the deferred chain if we're in ludicrous debug mode - if log.getEffectiveLevel() < logging.DEBUG: - print("Callback Chain for Task %s" % self.task.name) - dumpCallbacks(d) - return d - - def _run(self): - self.task.state = TaskStates.STATE_RUNNING - self.running() - - return self.task.doTask() - - def _finished(self, result): - log.debug("Task %s finished, result: %r", self.task.name, result) - - # Unless the task completed or paused itself, make sure - # that we always reset the state to IDLE once the task is finished. - if self.task.state != TaskStates.STATE_COMPLETED: - self.task.state = TaskStates.STATE_IDLE - - self._scheduler.taskDone(self.task.name) - - self.finished(result) - - if self.task.state == TaskStates.STATE_COMPLETED: - self._scheduler.removeTasksForConfig(self.task.configId) - - # return result for executor callbacks - return result - - def _late(self): - log.debug("Task %s skipped because it was not idle", self.task.name) - self.late() +from ..cyberark import get_cyberark +from ..interfaces import IScheduler, IPausingScheduledTask +from ..tasks import TaskStates +from .statistics import StateStatistics, TaskStatistics +from .task import CallableTaskFactory -class CallableTaskFactory(object): - """ - A factory that creates instances of CallableTask, allowing it to be - easily subclassed or replaced in different scheduler implementations. - """ - - def getCallableTask(self, newTask, scheduler): - return CallableTask(newTask, scheduler, scheduler.executor) - - -def getConfigId(task): - return task.configId +log = logging.getLogger("zen.collector.scheduler") -@zope.interface.implementer(IScheduler) -class Scheduler(object): +@implementer(IScheduler) +class TaskScheduler(object): """ A simple interval-based scheduler that makes use of the Twisted framework's LoopingCall construct. 
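TaskScheduler.make() is the intended construction path for the rewritten scheduler: it wires in a CallableTaskFactory and a single-slot TwistedExecutor, and maxTasks now simply proxies the executor's limit. A usage sketch against the API in the hunks that follow; PingTask is a stand-in for a real IScheduledTask provider:

    from zope.interface import implementer

    from Products.ZenCollector.interfaces import IScheduledTask
    from Products.ZenCollector.scheduler import TaskScheduler
    from Products.ZenCollector.tasks import TaskStates

    @implementer(IScheduledTask)
    class PingTask(object):
        """Minimal stand-in for a real collection task."""

        def __init__(self, name, configId, interval=60):
            self.name = name
            self.configId = configId
            self.interval = interval
            self.state = TaskStates.STATE_IDLE

        def doTask(self):
            # Real tasks return a Deferred that fires when collection ends.
            return None

    scheduler = TaskScheduler.make()
    scheduler.maxTasks = 4  # proxies the executor's concurrency limit
    scheduler.addTask(PingTask("router1 ping", "router1"), now=True)

    # Later, when the device's config changes or the device is deleted:
    # scheduler.pauseTasksForConfig("router1")
    # scheduler.removeTasksForConfig("router1")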
@@ -311,20 +41,29 @@ class Scheduler(object): CLEANUP_TASKS_INTERVAL = 10 # seconds ATTEMPTS = 3 - def __init__(self, callableTaskFactory=CallableTaskFactory()): + @classmethod + def make(cls, factory=None, executor=None): + factory = factory if factory is not None else CallableTaskFactory() + executor = executor if executor is not None else TwistedExecutor(1) + return cls(factory, executor) + + def __init__(self, factory, executor): self._loopingCalls = {} self._tasks = {} self._taskCallback = {} self._taskStats = {} - self._callableTaskFactory = callableTaskFactory + self._displaycounts = () self._shuttingDown = False + + self._factory = factory + self._executor = executor + # create a cleanup task that will periodically sweep the # cleanup dictionary for tasks that need to be cleaned - self._tasksToCleanup = KeyedSet(getConfigId) + self._tasksToCleanup = {} self._cleanupTask = task.LoopingCall(self._cleanupTasks) - self._cleanupTask.start(Scheduler.CLEANUP_TASKS_INTERVAL) + self._cleanupTask.start(TaskScheduler.CLEANUP_TASKS_INTERVAL) - self._executor = TwistedExecutor(1) self.cyberark = get_cyberark() # Ensure that we can cleanly shutdown all of our tasks @@ -338,6 +77,18 @@ def __init__(self, callableTaskFactory=CallableTaskFactory()): "after", "shutdown", self.shutdown, "after" ) + @property + def executor(self): + return self._executor + + @property + def maxTasks(self): + return self._executor.limit + + @maxTasks.setter + def maxTasks(self, value): + self._executor.limit = value + def __contains__(self, task): """ Returns True if the task has been added to the scheduler. Otherwise @@ -347,6 +98,42 @@ def __contains__(self, task): name = getattr(task, "name", task) return name in self._tasks + def addTask(self, newTask, callback=None, now=False): + """ + Add a new IScheduledTask object for the scheduler to run. 
+ + @param newTask the task to schedule + @type newTask IScheduledTask + @param callback A callable invoked every time the task completes + @type callback callable + @param now Set True to run the task now + @type now boolean + """ + name = newTask.name + if name in self._tasks: + raise ValueError("Task with same name already exists: %s" % name) + callableTask = self._factory.getCallableTask(newTask, self) + loopingCall = task.LoopingCall(callableTask) + self._loopingCalls[name] = loopingCall + self._tasks[name] = callableTask + self._taskCallback[name] = callback + self.taskAdded(callableTask) + startDelay = getattr(newTask, "startDelay", None) + if startDelay is None: + startDelay = 0 if now else self._getStartDelay(newTask) + reactor.callLater(startDelay, self._startTask, newTask, startDelay) + + # just in case someone does not implement scheduled, lets be careful + scheduled = getattr(newTask, "scheduled", lambda x: None) + scheduled(self) + log.debug( + "added new task name=%s config-id=%s interval=%s start-delay=%s", + newTask.name, + newTask.configId, + newTask.interval, + startDelay, + ) + def shutdown(self, phase): """ The reactor shutdown has three phases for event types: @@ -371,7 +158,7 @@ def shutdown(self, phase): doomedTasks = [] stopQ = {} log.debug("In shutdown stage %s", phase) - for (taskName, taskWrapper) in self._tasks.iteritems(): + for taskName, taskWrapper in self._tasks.iteritems(): task = taskWrapper.task stopPhase = getattr(task, "stopPhase", "before") if ( @@ -381,10 +168,10 @@ def shutdown(self, phase): continue stopOrder = getattr(task, "stopOrder", 0) queue = stopQ.setdefault(stopOrder, []) - queue.append((taskName, taskWrapper, task)) + queue.append((taskName, taskWrapper)) for stopOrder in sorted(stopQ): - for (taskName, taskWrapper, task) in stopQ[stopOrder]: + for taskName, taskWrapper in stopQ[stopOrder]: loopTask = self._loopingCalls[taskName] if loopTask.running: log.debug("Stopping running task %s", taskName) @@ -394,7 +181,7 @@ def shutdown(self, phase): self.taskRemoved(taskWrapper) for taskName in doomedTasks: - self._tasksToCleanup.add(self._tasks[taskName].task) + self._tasksToCleanup[taskName] = self._tasks[taskName].task del self._loopingCalls[taskName] del self._tasks[taskName] @@ -403,121 +190,60 @@ def shutdown(self, phase): cleanupList = self._cleanupTasks() return defer.DeferredList(cleanupList) - @property - def executor(self): - return self._executor - - def _getMaxTasks(self): - return self._executor.getMax() - - def _setMaxTasks(self, max): - return self._executor.setMax(max) + def _startTask(self, task, delayed, attempts=0): + # If there's no LoopingCall or the LoopingCall is running, + # then there's nothing to do so return + loopingCall = self._loopingCalls.get(task.name) + if loopingCall is None or loopingCall.running: + return - maxTasks = property(_getMaxTasks, _setMaxTasks) + if task.name in self._tasksToCleanup: + delay = random.randint(0, int(task.interval / 2)) # noqa: S311 + delayed = delayed + delay + if attempts > TaskScheduler.ATTEMPTS: + del self._tasksToCleanup[task.name] + log.warn( + "exceeded max start attempts name=%s config-id=%s", + task.name, + task.configId, + ) + attempts = 0 + attempts += 1 + log.debug( + "waiting for cleanup name=%s config-id=%s " + "current-delay=%s delayed-so-far=%s attempts=%s", + task.name, + task.configId, + delay, + delayed, + attempts, + ) + reactor.callLater(delay, self._startTask, task, delayed, attempts) + else: + d = loopingCall.start(task.interval) + d.addBoth(self._ltCallback, 
task.name) + log.debug( + "started task name=%s config-id=%s interval=%s " + "delayed=%s attempts=%s", + task.name, + task.configId, + task.interval, + delayed, + attempts, + ) def _ltCallback(self, result, task_name): """last call back in the chain, if it gets called as an errBack the looping will stop - shouldn't be called since CallableTask doesn't return a deferred, here for sanity and debug""" if task_name in self._loopingCalls: - loopingCall = self._loopingCalls[task_name] - log.debug("call finished %s : %s", loopingCall, result) + log.debug("task finished name=%s result=%s", task_name, result) if isinstance(result, Failure): log.warn( "Failure in looping call, will not reschedule %s", task_name ) log.error("%s", result) - def _startTask( - self, result, task_name, interval, configId, delayed, attempts=0 - ): - """start the task using a callback so that its put at the bottom of - the Twisted event queue, to allow other processing to continue and - to support a task start-time jitter""" - if task_name in self._loopingCalls: - loopingCall = self._loopingCalls[task_name] - if not loopingCall.running: - if configId in self._tasksToCleanup: - delay = random.randint(0, int(interval / 2)) - delayed = delayed + delay - if attempts > Scheduler.ATTEMPTS: - obj = self._tasksToCleanup.pop_by_key(configId) - log.debug( - "Forced cleanup of %s. Task: %s", - configId, - obj.name, - ) - attempts = 0 - attempts += 1 - log.debug( - "Waiting for cleanup of %s. Task %s postponing its " - "start %d seconds (%d so far). Attempt: %s", - configId, - task_name, - delay, - delayed, - attempts, - ) - d = defer.Deferred() - d.addCallback( - self._startTask, - task_name, - interval, - configId, - delayed, - attempts, - ) - reactor.callLater(delay, d.callback, None) - else: - log.debug( - "Task %s starting (waited %d seconds) on %d " - "second intervals", - task_name, - delayed, - interval, - ) - d = loopingCall.start(interval) - d.addBoth(self._ltCallback, task_name) - - def addTask(self, newTask, callback=None, now=False): - """ - Add a new IScheduledTask to the scheduler for execution. - @param newTask the new task to schedule - @type newTask IScheduledTask - @param callback a callback to be notified each time the task completes - @type callback a Python callable - """ - if newTask.name in self._tasks: - raise ValueError("Task %s already exists" % newTask.name) - log.debug( - "add task %s, %s using %s second interval", - newTask.name, - newTask, - newTask.interval, - ) - callableTask = self._callableTaskFactory.getCallableTask(newTask, self) - loopingCall = task.LoopingCall(callableTask) - self._loopingCalls[newTask.name] = loopingCall - self._tasks[newTask.name] = callableTask - self._taskCallback[newTask.name] = callback - self.taskAdded(callableTask) - startDelay = getattr(newTask, "startDelay", None) - if startDelay is None: - startDelay = 0 if now else self._getStartDelay(newTask) - d = defer.Deferred() - d.addCallback( - self._startTask, - newTask.name, - newTask.interval, - newTask.configId, - startDelay, - ) - reactor.callLater(startDelay, d.callback, None) - - # just in case someone does not implement scheduled, lets be careful - scheduled = getattr(newTask, "scheduled", lambda x: None) - scheduled(self) - def _getStartDelay(self, task): """ amount of time to delay the start of a task. Prevents bunching up of @@ -525,7 +251,7 @@ def _getStartDelay(self, task): time. 
""" # simple delay of random number between 0 and half the task interval - delay = random.randint(0, int(task.interval / 2)) + delay = random.randint(0, int(task.interval / 2)) # noqa: S311 return delay def taskAdded(self, taskWrapper): @@ -573,7 +299,7 @@ def getTasksForConfig(self, configId): Get all tasks associated with the specified identifier. """ tasks = [] - for (taskName, taskWrapper) in self._tasks.iteritems(): + for taskWrapper in self._tasks.itervalues(): task = taskWrapper.task if task.configId == configId: tasks.append(task) @@ -601,34 +327,41 @@ def setNextExpectedRun(self, taskName, taskInterval): ) def removeTasks(self, taskNames): + # type: (Self, Sequence[str]) -> None """ Remove tasks """ + if not isinstance(taskNames, Sequence): + taskNames = tuple(taskNames) + doomedTasks = [] # child ids are any task that are children of the current task being # removed childIds = [] - for taskName in taskNames: - taskWrapper = self._tasks[taskName] + for name in taskNames: + taskWrapper = self._tasks[name] task = taskWrapper.task subIds = getattr(task, "childIds", None) if subIds: childIds.extend(subIds) - log.debug("Stopping task %s, %s", taskName, task) - if self._loopingCalls[taskName].running: - self._loopingCalls[taskName].stop() - - doomedTasks.append(taskName) + if self._loopingCalls[name].running: + self._loopingCalls[name].stop() + log.debug( + "stopped task name=%s config-id=%s", name, task.configId + ) + doomedTasks.append(name) self.taskRemoved(taskWrapper) for taskName in doomedTasks: task = self._tasks[taskName].task - self._tasksToCleanup.add(task) + self._tasksToCleanup[taskName] = task del self._loopingCalls[taskName] del self._tasks[taskName] self._displayTaskStatistics(task) del self._taskStats[taskName] - # TODO: ponder task statistics and keeping them around? 
+ log.debug( + "removed task name=%s config-id=%s", task.name, task.configId + ) map(self.removeTasksForConfig, childIds) @@ -640,13 +373,15 @@ def removeTasksForConfig(self, configId): @type configId: string """ self.removeTasks( - taskName - for taskName, taskWrapper in self._tasks.iteritems() - if taskWrapper.task.configId == configId + tuple( + name + for name, wrapper in self._tasks.iteritems() + if wrapper.task.configId == configId + ) ) def pauseTasksForConfig(self, configId): - for (taskName, taskWrapper) in self._tasks.items(): + for taskName, taskWrapper in self._tasks.items(): task = taskWrapper.task if task.configId == configId: log.debug("Pausing task %s", taskName) @@ -654,7 +389,7 @@ def pauseTasksForConfig(self, configId): self.taskPaused(taskWrapper) def resumeTasksForConfig(self, configId): - for (taskName, taskWrapper) in self._tasks.iteritems(): + for taskName, taskWrapper in self._tasks.iteritems(): task = taskWrapper.task if task.configId == configId: log.debug("Resuming task %s", taskName) @@ -754,15 +489,23 @@ def displayStatistics(self, verbose): totalStateStats.maxElapsedTime, stats.maxElapsedTime ) - log.info( - "Tasks: %d Successful_Runs: %d Failed_Runs: %d Missed_Runs: %d " - "Queued_Tasks: %d Running_Tasks: %d ", + counts = ( totalTasks, totalRuns, totalFailedRuns, totalMissedRuns, - self.executor.queued, - self.executor.running, + self._executor.queued, + self._executor.running, + ) + if self._displaycounts != counts: + self._displaycounts = counts + logmethod = log.info + else: + logmethod = log.debug + logmethod( + "Tasks: %d Successful_Runs: %d Failed_Runs: %d " + "Missed_Runs: %d Queued_Tasks: %d Running_Tasks: %d ", + *counts ) if verbose: @@ -873,21 +616,21 @@ def _cleanupTasks(self): todoList = [ task - for task in self._tasksToCleanup + for task in self._tasksToCleanup.values() if self._isTaskCleanable(task) ] cleanupWaitList = [] - for task in todoList: + for item in todoList: # changing the state of the task will keep the next cleanup run # from processing it again - task.state = TaskStates.STATE_CLEANING + item.state = TaskStates.STATE_CLEANING if self._shuttingDown: # let the task know the scheduler is shutting down - task.state = TaskStates.STATE_SHUTDOWN - log.debug("Cleanup on task %s %s", task.name, task) - d = defer.maybeDeferred(task.cleanup) - d.addBoth(self._cleanupTaskComplete, task) + item.state = TaskStates.STATE_SHUTDOWN + log.debug("Cleanup on task %s %s", item.name, item) + d = defer.maybeDeferred(item.cleanup) + d.addBoth(self._cleanupTaskComplete, item) cleanupWaitList.append(d) return cleanupWaitList @@ -901,7 +644,7 @@ def _cleanupTaskComplete(self, result, task): result, task.name, ) - self._tasksToCleanup.discard(task) + del self._tasksToCleanup[task.name] return result def _isTaskCleanable(self, task): @@ -920,3 +663,12 @@ def resetStats(self, taskName): taskStats.totalRuns = 0 taskStats.failedRuns = 0 taskStats.missedRuns = 0 + + +class Scheduler(TaskScheduler): + """Backward compatibility layer.""" + + def __init__(self): + super(Scheduler, self).__init__( + CallableTaskFactory(), TwistedExecutor(1) + ) diff --git a/Products/ZenCollector/scheduler/statistics.py b/Products/ZenCollector/scheduler/statistics.py new file mode 100644 index 0000000000..90c653bbe9 --- /dev/null +++ b/Products/ZenCollector/scheduler/statistics.py @@ -0,0 +1,79 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import math +import time + + +class StateStatistics(object): + def __init__(self, state): + self.state = state + self.reset() + + def addCall(self, elapsedTime): + self.totalElapsedTime += elapsedTime + self.totalElapsedTimeSquared += elapsedTime**2 + self.totalCalls += 1 + + if self.totalCalls == 1: + self.minElapsedTime = elapsedTime + self.maxElapsedTime = elapsedTime + else: + self.minElapsedTime = min(self.minElapsedTime, elapsedTime) + self.maxElapsedTime = max(self.maxElapsedTime, elapsedTime) + + def reset(self): + self.totalElapsedTime = 0.0 + self.totalElapsedTimeSquared = 0.0 + self.totalCalls = 0 + self.minElapsedTime = 0xFFFFFFFF + self.maxElapsedTime = 0 + + @property + def mean(self): + return float(self.totalElapsedTime) / float(self.totalCalls) + + @property + def stddev(self): + if self.totalCalls == 1: + return 0 + else: + # see http://www.dspguide.com/ch2/2.htm for stddev of running stats + mean = self.totalElapsedTime**2 / self.totalCalls + return math.sqrt( + (self.totalElapsedTimeSquared - mean) / (self.totalCalls - 1) + ) + + +class TaskStatistics(object): + def __init__(self, task): + self.task = task + self.totalRuns = 0 + self.failedRuns = 0 + self.missedRuns = 0 + self.states = {} + self.stateStartTime = None + + def trackStateChange(self, oldState, newState): + now = time.time() + + # record how long we spent in the previous state, if there was one + if oldState is not None and self.stateStartTime: + # TODO: how do we properly handle clockdrift or when the clock + # changes, or is time.time() independent of that? + elapsedTime = now - self.stateStartTime + + if oldState in self.states: + stats = self.states[oldState] + else: + stats = StateStatistics(oldState) + self.states[oldState] = stats + stats.addCall(elapsedTime) + + self.stateStartTime = now diff --git a/Products/ZenCollector/scheduler/task.py b/Products/ZenCollector/scheduler/task.py new file mode 100644 index 0000000000..bac73eccbd --- /dev/null +++ b/Products/ZenCollector/scheduler/task.py @@ -0,0 +1,213 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +import logging +import os +import sys + +from StringIO import StringIO + +from twisted.internet import defer +from twisted.python.failure import Failure + +from Products.ZenEvents import Event +from Products.ZenUtils.Utils import dumpCallbacks + +from ..interfaces import IScheduledTask +from ..tasks import TaskStates + +log = logging.getLogger("zen.collector.scheduler") + + +class CallableTask(object): + """ + A CallableTask wraps an IScheduledTask object to make it a callable. + This allows the scheduler to make use of the Twisted framework's + LoopingCall construct for simple interval-based scheduling. 
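For reference, StateStatistics above keeps only the running sums of x and x**2, and its stddev property is the usual one-pass sample standard deviation sqrt((sum(x**2) - sum(x)**2/n) / (n - 1)); note that the intermediate the new code calls `mean` is really sum(x)**2/n. A small sanity check of that identity against the two-pass formula (the sample values are made up):

import math

samples = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]  # arbitrary example data
n = len(samples)
total = sum(samples)
total_sq = sum(x ** 2 for x in samples)

# one-pass form used by StateStatistics.stddev
running = math.sqrt((total_sq - total ** 2 / n) / (n - 1))

# direct two-pass computation for comparison
mean = total / n
direct = math.sqrt(sum((x - mean) ** 2 for x in samples) / (n - 1))

assert abs(running - direct) < 1e-9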
+ """ + + def __init__(self, task, scheduler, executor): + if not IScheduledTask.providedBy(task): + raise TypeError("task must provide IScheduledTask") + + self.task = task + self.task._scheduler = scheduler + self._scheduler = scheduler + self._executor = executor + self.paused = False + self.taskStats = None + + def __repr__(self): + return "CallableTask: %s" % getattr(self.task, "name", self.task) + + def running(self): + """ + Called whenever this task is being run. + """ + if hasattr(self.task, "missed"): + self._send_clear_event() + self.taskStats.totalRuns += 1 + + def logTwistedTraceback(self, reason): + """ + Twisted errBack to record a traceback and log messages + """ + out = StringIO() + reason.printTraceback(out) + # This shouldn't be necessary except for dev code + log.debug(out.getvalue()) + out.close() + + def finished(self, result): + """ + Called whenever this task has finished. + """ + if isinstance(result, Failure): + self.taskStats.failedRuns += 1 + self.logTwistedTraceback(result) + + def late(self): + """ + Called whenever this task is late and missed its scheduled run time. + """ + # some tasks we don't want to consider a missed run. + if getattr(self.task, "suppress_late", False): + return + self._send_warning_event() + self.taskStats.missedRuns += 1 + + def _send_warning_event(self): + try: + # send event only for missed runs on devices. + self.task._eventService.sendEvent( + { + "eventClass": "/Perf/MissedRuns", + "component": os.path.basename(sys.argv[0]).replace( + ".py", "" + ), + }, + device=self.task._devId, + summary="Missed run: {}".format(self.task.name), + message=self._scheduler._displayStateStatistics( + "", self.taskStats.states + ), + severity=Event.Warning, + eventKey=self.task.name, + ) + self.task.missed = True + except Exception: + if log.isEnabledFor(logging.DEBUG): + log.exception("unable to send /Perf/MissedRuns warning event") + + def _send_clear_event(self): + try: + self.task._eventService.sendEvent( + { + "eventClass": "/Perf/MissedRuns", + "component": os.path.basename(sys.argv[0]).replace( + ".py", "" + ), + }, + device=self.task._devId, + summary="Task `{}` is being run.".format(self.task.name), + severity=Event.Clear, + eventKey=self.task.name, + ) + del self.task.missed + except Exception: + if log.isEnabledFor(logging.DEBUG): + log.exception("unable to send /Perf/MissedRuns clear event") + + def __call__(self): + if self.task.state is TaskStates.STATE_PAUSED and not self.paused: + self.task.state = TaskStates.STATE_IDLE + elif self.paused and self.task.state is not TaskStates.STATE_PAUSED: + self.task.state = TaskStates.STATE_PAUSED + + self._scheduler.setNextExpectedRun(self.task.name, self.task.interval) + + if self.task.state in [TaskStates.STATE_IDLE, TaskStates.STATE_PAUSED]: + if not self.paused: + self.task.state = TaskStates.STATE_QUEUED + # don't return deferred to looping call. + # If a deferred is returned to looping call + # it won't reschedule on error and will only + # reschedule after the deferred is done. 
This method + # should be called regardless of whether or + # not the task is still running to keep track + # of "late" tasks + d = self._executor.submit(self._doCall) + + def _callError(failure): + msg = "%s - %s failed %s" % ( + self.task, + self.task.name, + failure, + ) + log.debug(msg) + # don't return failure to prevent + # "Unhandled error in Deferred" message + return msg + + # l last error handler in the chain + d.addErrback(_callError) + else: + self._late() + # don't return a Deferred because we want LoopingCall to keep + # rescheduling so that we can keep track of late intervals + + def _doCall(self): + d = defer.maybeDeferred(self._run) + d.addBoth(self._finished) + + # dump the deferred chain if we're in ludicrous debug mode + if log.getEffectiveLevel() < logging.DEBUG: + print("Callback Chain for Task %s" % self.task.name) + dumpCallbacks(d) + return d + + def _run(self): + self.task.state = TaskStates.STATE_RUNNING + self.running() + + return self.task.doTask() + + def _finished(self, result): + log.debug("Task %s finished, result: %r", self.task.name, result) + + # Unless the task completed or paused itself, make sure + # that we always reset the state to IDLE once the task is finished. + if self.task.state != TaskStates.STATE_COMPLETED: + self.task.state = TaskStates.STATE_IDLE + + self._scheduler.taskDone(self.task.name) + + self.finished(result) + + if self.task.state == TaskStates.STATE_COMPLETED: + self._scheduler.removeTasksForConfig(self.task.configId) + + # return result for executor callbacks + return result + + def _late(self): + log.debug("Task %s skipped because it was not idle", self.task.name) + self.late() + + +class CallableTaskFactory(object): + """ + A factory that creates instances of CallableTask, allowing it to be + easily subclassed or replaced in different scheduler implementations. + """ + + def getCallableTask(self, newTask, scheduler): + return CallableTask(newTask, scheduler, scheduler.executor) diff --git a/Products/ZenCollector/services/ConfigCache.py b/Products/ZenCollector/services/ConfigCache.py new file mode 100644 index 0000000000..fe1f348ea2 --- /dev/null +++ b/Products/ZenCollector/services/ConfigCache.py @@ -0,0 +1,250 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2009, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import logging + +from zope.component import createObject + +from Products.ZenCollector.configcache.cache import DeviceKey, DeviceQuery +from Products.ZenHub.errors import translateError +from Products.ZenHub.HubService import HubService +from Products.ZenUtils.RedisUtils import getRedisClient, getRedisUrl + +from .optionsfilter import getOptionsFilter + + +class ConfigCache(HubService): + """ZenHub service for retrieving device configs from Redis.""" + + def __init__(self, dmd, monitor): + """ + Initializes a ConfigCache instance. 
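Putting the scheduler and CallableTask pieces together, a rough end-to-end sketch (EchoTask and all of its attribute values are illustrative assumptions; the import paths follow the new files in this diff):

from zope.interface import implementer

from Products.ZenCollector.interfaces import IScheduledTask
from Products.ZenCollector.scheduler import TaskScheduler
from Products.ZenCollector.tasks import TaskStates


@implementer(IScheduledTask)
class EchoTask(object):
    """Hypothetical minimal task used only for illustration."""

    name = "echo-device1"
    configId = "device1"
    interval = 60  # seconds
    state = TaskStates.STATE_IDLE

    def doTask(self):
        return "collected"   # real tasks usually return a Deferred

    def cleanup(self):
        return None


scheduler = TaskScheduler.make()
# addTask wraps the task in a CallableTask; CallableTask.__call__ submits
# the work to the executor and deliberately returns None so the underlying
# LoopingCall keeps firing on schedule even if a run is still queued.
scheduler.addTask(EchoTask(), now=True)
# reactor.run() is needed before anything actually executes.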
+ + :param dmd: the Zenoss DMD reference + :param monitor: the collector instance name + """ + HubService.__init__(self, dmd, monitor) + + # Get the collector information (eg the 'localhost' collector) + self._monitor = self.dmd.Monitors.Performance._getOb(self.instance) + + client = getRedisClient(url=getRedisUrl()) + self._stores = type( + "Stores", + (object,), + { + "device": createObject("deviceconfigcache-store", client), + "oidmap": createObject("oidmapcache-store", client), + }, + )() + + @translateError + def remote_getDeviceNames(self, servicename, options): + """ + Return device IDs. + + @param servicename: Name of the configuration service. + @type servicename: str + @rtype: ImmutableSequence[str] + """ + return tuple( + key.device + for key in self._filter(self._keys(servicename), options) + ) + + @translateError + def remote_getDeviceConfigs( + self, servicename, when, deviceids, options=None + ): + """ + Return a 'diff' of device configurations compared to `deviceids`. + + Device configurations that are new, updated, and removed relative + to the `deviceids` argument are returned using a JSON structure that + resembles the following: + + { + "new": [DeviceProxy, ...], + "updated": [DeviceProxy, ...], + "removed": [str, ...] + } + + The "new" entry will contain configurations for devices not found + in `deviceids`. + + The "updated" entry will contain configurations that have changed + since the `when` argument and are present in the `deviceids` argument. + + The "removed" entry will contain the IDs of devices found in + `deviceids` but are not found in the cache or the caller is no longer + responsible for those configurations. + + Device IDs appearing in `deviceids` but not included in the returned + data should be considered as having unchanged configurations. + + @param servicename: Name of the configuration service. + @type servicename: str + @param when: When the last set of devices was returned. + @type when: datetime.datetime + @param names: Names of the devices to compare against. + @type names: Sequence[str] + @rtype: ImmutableSequence[DeviceProxy] + """ + self.log.debug( + "[ConfigCache] getDeviceConfigs(%r, %r, %r, %r)", + servicename, + when, + deviceids, + options, + ) + previous = set(deviceids) + predicate = getOptionsFilter(options) + current_keys = tuple(self._filter(self._keys(servicename), predicate)) + + # 'newest_keys' references devices not found in 'previous' + newest_keys = ( + key for key in current_keys if key.device not in previous + ) + + # 'updated_keys' references newer configs found in 'previous' + updated_keys = ( + status.key + for status in self._stores.device.get_newer( + when, service=servicename, monitor=self.instance + ) + if status.key.device in previous + ) + + # 'removed' references devices found in 'previous' + # but not in 'current'. + current = {key.device for key in current_keys} + removed = previous - current + + return { + "new": list(self._get_configs(newest_keys)), + "updated": list(self._get_configs(updated_keys)), + "removed": list(removed), + } + + @translateError + def remote_getDeviceConfig(self, servicename, deviceid, options=None): + """ + Returns the configuration for the requested device or None. + + If the device does not exist or if the device is filtered out for + whatever reason, None is returned. + + Otherwise, the configuration for the device is returned. + + @param servicename: Name of the configuration service. + @type servicename: str + @param when: When the last set of devices was returned. 
+ @type when: datetime.datetime + @param deviceid: Name of the device. + @type deviceid: str + @rtype: DeviceProxy | None + """ + self.log.info( + "[ConfigCache] getDeviceConfig(%r, %r, %r)", + servicename, + deviceid, + options, + ) + predicate = getOptionsFilter(options) + key = DeviceKey( + service=servicename, monitor=self.instance, device=deviceid + ) + filtered = tuple(self._filter([key], predicate)) + if len(filtered) == 0: + return None + if key not in self._stores.device: + return None + return self._stores.device.get(key).config + + def remote_getOidMap(self, checksum): + """ + Returns the current OID map if its checksum doesn't match `checksum`. + The checksum of the current OID map is returned as well. + + The return value is two element tuple. The first element is the + checksum and the second element is the json-ified oidmap. + + If the stored checksum and the `checksum` parameter are the same or + if there is no oidmap data, the return value is `(None, None)`. + + @rtype: Tuple[str, Dict] | None + """ + self.log.debug("[ConfigCache] getOidMap(%r)", checksum) + stored_checksum = self._stores.oidmap.get_checksum() + if stored_checksum == checksum: + return (None, None) + record = self._stores.oidmap.get() + return (record.checksum, record.oidmap) + + def _keys(self, servicename): + """ + Return all the device IDs associated with the current monitor and + the given configuration service. + + @param servicename: Name of the configuration service. + @type servicename: str + @rtype: Iterator[str] + """ + query = DeviceQuery(monitor=self.instance, service=servicename) + self.log.debug("[ConfigCache] using query %s", query) + return self._stores.device.search(query) + + def _filter(self, keys, predicate): + """ + Returns a subset of device IDs in `names` based on the contents + of the `options` parameter. + + @param keys: Cache config keys + @type keys: Iterable[CacheKey] + @param predicate: Function that determines whether to keep the device + @type options: Function(Device) -> Boolean + @rtype: Iterator[str] + """ + # _filter is a generator function returning Device objects + proxy = _DeviceProxy() + for key in keys: + try: + proxy.id = key.device + if predicate(proxy): + yield key + except Exception: + if self.log.isEnabledFor(logging.DEBUG): + method = self.log.exception + else: + method = self.log.warn + method("error filtering device ID %s", key.device) + + def _get_configs(self, keys): + if self.log.isEnabledFor(logging.DEBUG): + mlog = self.log.exception + else: + mlog = self.log.error + for key in keys: + try: + yield self._stores.device.get(key).config + except Exception as ex: + mlog( + "failed to retrieve config " + "error=%s service=%s collector=%s device=%s", + ex, + key.service, + key.monitor, + key.device, + ) + + +class _DeviceProxy(object): + # The predicate returned by getOptionsFilter expects an object + # with an `id` attribute. So make a simple class with one attribute. 
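A collector-side sketch of consuming the new/updated/removed structure described above (merge_config_diff, local_configs and remote are illustrative names, not APIs introduced by this diff):

def merge_config_diff(local_configs, diff):
    # local_configs maps device id -> DeviceProxy held by the collector
    for proxy in diff["new"]:
        local_configs[proxy.configId] = proxy
    for proxy in diff["updated"]:
        local_configs[proxy.configId] = proxy
    for device_id in diff["removed"]:
        local_configs.pop(device_id, None)
    # ids absent from the reply are unchanged and are left alone
    return local_configs


# usage, with `remote` standing in for a reference to this ConfigCache
# service:
#   diff = yield remote.callRemote(
#       "getDeviceConfigs", servicename, last_fetch, list(local_configs)
#   )
#   merge_config_diff(local_configs, diff)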
+ id = None diff --git a/Products/ZenCollector/services/config.py b/Products/ZenCollector/services/config.py index 1efe4c8c8b..bf79bf1148 100644 --- a/Products/ZenCollector/services/config.py +++ b/Products/ZenCollector/services/config.py @@ -11,265 +11,143 @@ import hashlib import logging -from Acquisition import aq_parent from cryptography.fernet import Fernet -from twisted.internet import defer from twisted.spread import pb from ZODB.transact import transact -from zope import component +from Products.ZenHub.errors import translateError from Products.ZenHub.HubService import HubService -from Products.ZenHub.interfaces import IBatchNotifier -from Products.ZenHub.PBDaemon import translateError -from Products.ZenHub.services.Procrastinator import Procrastinate from Products.ZenHub.services.ThresholdMixin import ThresholdMixin -from Products.ZenHub.zodb import onUpdate, onDelete from Products.ZenModel.Device import Device -from Products.ZenModel.DeviceClass import DeviceClass -from Products.ZenModel.PerformanceConf import PerformanceConf -from Products.ZenModel.privateobject import is_private -from Products.ZenModel.RRDTemplate import RRDTemplate -from Products.ZenModel.ZenPack import ZenPack -from Products.ZenUtils.AutoGCObjectReader import gc_cache_every -from Products.ZenUtils.picklezipper import Zipper +from Products.ZenUtils.guid.interfaces import IGlobalIdentifier from Products.Zuul.utils import safe_hasattr as hasattr -from ..interfaces import IConfigurationDispatchingFilter +from .error import trapException +from .optionsfilter import getOptionsFilter +from .push import UpdateCollectorMixin class DeviceProxy(pb.Copyable, pb.RemoteCopy): - def __init__(self): - """ - Do not use base classes initializers - """ + """Used to proxy device objects to collection services.""" @property def configId(self): """ - This is the id used by the framework to keep track of configurations, + This is the ID used by the framework to keep track of configurations, what to run, delete etc... - Use this instead of id since certain daemons can use a - configuration id that is different than the underlying device id. + + Use this instead of `id` since certain daemons can use a + configuration ID that is different than the underlying device ID. """ - retval = getattr(self, "_config_id", None) - return retval if (retval is not None) else self.id + cfgId = getattr(self, "_config_id", None) + return cfgId if (cfgId is not None) else self.id @property def deviceGuid(self): - """ """ return getattr(self, "_device_guid", None) + def __eq__(self, other): + if isinstance(other, DeviceProxy): + return self.configId == other.configId + return NotImplemented + + def __hash__(self): + return hash(self.configId) + def __str__(self): - return self.id + return self.configId def __repr__(self): - return "%s:%s" % (self.__class__.__name__, self.id) + return "%s:%s" % (self.__class__.__name__, self.configId) pb.setUnjellyableForClass(DeviceProxy, DeviceProxy) -# TODO: doc me! +# Default attributes copied to every device proxy. BASE_ATTRIBUTES = ( "id", "manageIp", ) -class CollectorConfigService(HubService, ThresholdMixin): +class CollectorConfigService(HubService, UpdateCollectorMixin, ThresholdMixin): + """Base class for ZenHub configuration service classes.""" + def __init__(self, dmd, instance, deviceProxyAttributes=()): """ - Constructs a new CollectorConfig instance. + Initializes a CollectorConfigService instance. 
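Because DeviceProxy now compares and hashes on configId, proxies can be deduplicated with ordinary set and dict operations; a small example (building proxies by hand like this is for illustration only):

from Products.ZenCollector.services.config import DeviceProxy

a = DeviceProxy()
a.id = "device1"

b = DeviceProxy()
b.id = "device1"
b._config_id = "device1"   # explicit config id with the same value

assert a == b              # equality is by configId, not object identity
assert len({a, b}) == 1    # so sets and dict keys deduplicate proxies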
- Subclasses must call this __init__ method but cannot do so with - the super() since parents of this class are not new-style classes. - - @param dmd: the Zenoss DMD reference - @param instance: the collector instance name - @param deviceProxyAttributes: a tuple of names for device attributes - that should be copied to every device proxy created - @type deviceProxyAttributes: tuple + :param dmd: the Zenoss DMD reference + :param instance: the collector instance name + :param deviceProxyAttributes: a tuple of names for device attributes + that should be copied to every device proxy created + :type deviceProxyAttributes: tuple """ HubService.__init__(self, dmd, instance) + UpdateCollectorMixin.__init__(self) self._deviceProxyAttributes = BASE_ATTRIBUTES + deviceProxyAttributes # Get the collector information (eg the 'localhost' collector) - self._prefs = self.dmd.Monitors.Performance._getOb(self.instance) - self.config = self._prefs # TODO fix me, needed for ThresholdMixin - self.configFilter = None - - # When about to notify daemons about device changes, wait for a little - # bit to batch up operations. - self._procrastinator = Procrastinate(self._pushConfig) - self._reconfigProcrastinator = Procrastinate(self._pushReconfigure) - - self._notifier = component.getUtility(IBatchNotifier) + self.conf = self.dmd.Monitors.getPerformanceMonitor(self.instance) - def _wrapFunction(self, functor, *args, **kwargs): - """ - Call the functor using the arguments, - and trap any unhandled exceptions. - - @parameter functor: function to call - @type functor: method - @parameter args: positional arguments - @type args: array of arguments - @parameter kwargs: keyword arguments - @type kwargs: dictionary - @return: result of functor(*args, **kwargs) or None if failure - @rtype: result of functor - """ - try: - return functor(*args, **kwargs) - except Exception as ex: - msg = "Unhandled exception in zenhub service %s: %s" % ( - self.__class__, - str(ex), - ) - self.log.exception(msg) - - import traceback - from Products.ZenEvents.ZenEventClasses import Critical - - evt = dict( - severity=Critical, - component=str(self.__class__), - traceback=traceback.format_exc(), - summary=msg, - device=self.instance, - methodCall="%s(%s, %s)" % (functor.__name__, args, kwargs), - ) - self.sendEvent(evt) + @property + def configFilter(self): return None - @onUpdate(PerformanceConf) - def perfConfUpdated(self, object, event): - with gc_cache_every(1000, db=self.dmd._p_jar._db): - if object.id == self.instance: - for listener in self.listeners: - listener.callRemote( - "setPropertyItems", object.propertyItems() - ) - - @onUpdate(ZenPack) - def zenPackUpdated(self, object, event): - with gc_cache_every(1000, db=self.dmd._p_jar._db): - for listener in self.listeners: - try: - listener.callRemote( - "updateThresholdClasses", - self.remote_getThresholdClasses(), - ) - except Exception: - self.log.warning( - "Error notifying a listener of new classes" - ) - - @onUpdate(Device) - def deviceUpdated(self, object, event): - with gc_cache_every(1000, db=self.dmd._p_jar._db): - self._notifyAll(object) - - @onUpdate(None) # Matches all - def notifyAffectedDevices(self, object, event): - # FIXME: This is horrible - with gc_cache_every(1000, db=self.dmd._p_jar._db): - if isinstance(object, self._getNotifiableClasses()): - self._reconfigureIfNotify(object) - else: - if isinstance(object, Device): - return - # something else... 
mark the devices as out-of-date - template = None - while object: - # Don't bother with privately managed objects; the ZenPack - # will handle them on its own - if is_private(object): - return - # walk up until you hit an organizer or a device - if isinstance(object, RRDTemplate): - template = object - if isinstance(object, DeviceClass): - uid = (self.__class__.__name__, self.instance) - devfilter = None - if template: - devfilter = _HasTemplate(template, self.log) - self._notifier.notify_subdevices( - object, uid, self._notifyAll, devfilter - ) - break - - if isinstance(object, Device): - self._notifyAll(object) - break - - object = aq_parent(object) - - @onDelete(Device) - def deviceDeleted(self, object, event): - with gc_cache_every(1000, db=self.dmd._p_jar._db): - devid = object.id - collector = object.getPerformanceServer().getId() - # The invalidation is only sent to the collector where the - # deleted device was. - if collector == self.instance: - self.log.debug( - "Invalidation: Performing remote call to delete " - "device %s from collector %s", - devid, - self.instance, - ) - for listener in self.listeners: - listener.callRemote("deleteDevice", devid) - else: - self.log.debug( - "Invalidation: Skipping remote call to delete " - "device %s from collector %s", - devid, - self.instance, - ) - @translateError def remote_getConfigProperties(self): - return self._prefs.propertyItems() + try: + items = self.conf.propertyItems() + finally: + pass + return items @translateError def remote_getDeviceNames(self, options=None): - devices = self._getDevices( - deviceFilter=self._getOptionsFilter(options) - ) - return [x.id for x in self._filterDevices(devices)] - - def _getDevices(self, deviceNames=None, deviceFilter=None): - - if not deviceNames: - devices = filter(deviceFilter, self._prefs.devices()) - else: - devices = [] - for name in deviceNames: - device = self.dmd.Devices.findDeviceByIdExact(name) - if not device: - continue - else: - if deviceFilter(device): - devices.append(device) - return devices + return [ + device.id + for device in self._selectDevices(self.conf.devices(), options) + ] @translateError def remote_getDeviceConfigs(self, deviceNames=None, options=None): - deviceFilter = self._getOptionsFilter(options) - devices = self._getDevices(deviceNames, deviceFilter) - devices = self._filterDevices(devices) - - deviceConfigs = [] - for device in devices: - proxies = self._wrapFunction(self._createDeviceProxies, device) + if deviceNames: + devices = _getDevicesByName(self.dmd.Devices, deviceNames) + else: + devices = self.conf.devices() + selected_devices = self._selectDevices(devices, options) + configs = [] + for device in selected_devices: + proxies = trapException(self, self._createDeviceProxies, device) if proxies: - deviceConfigs.extend(proxies) + configs.extend(proxies) - self._wrapFunction(self._postCreateDeviceProxy, deviceConfigs) - return deviceConfigs + trapException(self, self._postCreateDeviceProxy, configs) + return configs + + def _selectDevices(self, devices, options): + # _selectDevices is a generator function returning Device objects. + # `devices` is an iterator returning Device objects. + # `options` is a dict-like object. 
+ predicate = getOptionsFilter(options) + for device in devices: + try: + if all( + ( + predicate(device), + self._perfIdFilter(device), + self._filterDevice(device), + ) + ): + yield device + except Exception as ex: + if self.log.isEnabledFor(logging.DEBUG): + method = self.log.exception + else: + method = self.log.warn + method("error filtering device %r: %s", device, ex) @transact def _create_encryption_key(self): @@ -293,7 +171,7 @@ def remote_getEncryptionKey(self): # per collector daemon. s = hashlib.sha256() s.update(key) - s.update(self.__class__.__name__) + s.update(self.name()) return base64.urlsafe_b64encode(s.digest()) def _postCreateDeviceProxy(self, deviceConfigs): @@ -311,22 +189,21 @@ def _createDeviceProxy(self, device, proxy=None): instance, and then add any additional data to the proxy as their needs require. - @param device: the regular device object to create a proxy from - @return: a new device proxy object, or None if no proxy can be created - @rtype: DeviceProxy + :param device: the regular device object to create a proxy from + :type device: Products.ZenModel.Device + :return: a new device proxy object, or None if no proxy can be created + :rtype: DeviceProxy """ - proxy = proxy if (proxy is not None) else DeviceProxy() + proxy = DeviceProxy() if proxy is None else proxy # copy over all the attributes requested for attrName in self._deviceProxyAttributes: setattr(proxy, attrName, getattr(device, attrName, None)) if isinstance(device, Device): - from Products.ZenUtils.guid.interfaces import IGlobalIdentifier - guid = IGlobalIdentifier(device).getGUID() if guid: - setattr(proxy, "_device_guid", guid) + proxy._device_guid = guid return proxy def _filterDevice(self, device): @@ -346,61 +223,9 @@ def _filterDevice(self, device): not self.configFilter or self.configFilter(device) ) except AttributeError as e: - self.log.warn( - "got an attribute exception on device.monitorDevice()" - ) - self.log.debug(e) + self.log.warn("No such attribute device=%r error=%s", device, e) return False - def _getOptionsFilter(self, options): - def _alwaysTrue(x): - return True - - deviceFilter = _alwaysTrue - if options: - dispatchFilterName = ( - options.get("configDispatch", "") if options else "" - ) - filterFactories = dict( - component.getUtilitiesFor(IConfigurationDispatchingFilter) - ) - filterFactory = filterFactories.get( - dispatchFilterName, None - ) or filterFactories.get("", None) - if filterFactory: - deviceFilter = filterFactory.getFilter(options) or deviceFilter - return deviceFilter - - def _filterDevices(self, devices): - """ - Filters out devices from the provided list that should not be - converted into DeviceProxy instances and sent back to the collector - client. - - @param device: the device object to filter - @return: a list of devices that are to be included - @rtype: list - """ - filteredDevices = [] - - for dev in filter(None, devices): - try: - device = dev.primaryAq() - - if self._perfIdFilter(device) and self._filterDevice(device): - filteredDevices.append(device) - self.log.debug("Device %s included by filter", device.id) - else: - # don't use .id just in case there is something - # crazy returned. 
- self.log.debug("Device %r excluded by filter", device) - except Exception: - if self.log.isEnabledFor(logging.DEBUG): - self.log.exception("Got an exception filtering %r", dev) - else: - self.log.warn("Got an exception filtering %r", dev) - return filteredDevices - def _perfIdFilter(self, obj): """ Return True if obj is not a device (has no perfServer attribute) @@ -412,190 +237,14 @@ def _perfIdFilter(self, obj): or obj.perfServer.getRelatedId() == self.instance ) - def _notifyAll(self, object): - """ - Notify all instances (daemons) of a change for the device - """ - # procrastinator schedules a call to _pushConfig - self._procrastinator.doLater(object) - - def _pushConfig(self, device): - """ - push device config and deletes to relevent collectors/instances - """ - deferreds = [] - - if self._perfIdFilter(device) and self._filterDevice(device): - proxies = self._wrapFunction(self._createDeviceProxies, device) - if proxies: - self._wrapFunction(self._postCreateDeviceProxy, proxies) - else: - proxies = None - - prev_collector = ( - device.dmd.Monitors.primaryAq().getPreviousCollectorForDevice( - device.id - ) - ) - for listener in self.listeners: - if not proxies: - if hasattr(device, "getPerformanceServer"): - # The invalidation is only sent to the previous and - # current collectors. - if self.instance in ( - prev_collector, - device.getPerformanceServer().getId(), - ): - self.log.debug( - "Invalidation: Performing remote call for " - "device %s on collector %s", - device.id, - self.instance, - ) - deferreds.append( - listener.callRemote("deleteDevice", device.id) - ) - else: - self.log.debug( - "Invalidation: Skipping remote call for " - "device %s on collector %s", - device.id, - self.instance, - ) - else: - deferreds.append( - listener.callRemote("deleteDevice", device.id) - ) - self.log.debug( - "Invalidation: Performing remote call for " - "device %s on collector %s", - device.id, - self.instance, - ) - else: - options = self.listenerOptions.get(listener, None) - deviceFilter = self._getOptionsFilter(options) - for proxy in proxies: - if deviceFilter(proxy): - deferreds.append( - self._sendDeviceProxy(listener, proxy) - ) - - return defer.DeferredList(deferreds) - - def _sendDeviceProxy(self, listener, proxy): - """ - TODO - """ - return listener.callRemote("updateDeviceConfig", proxy) - - def sendDeviceConfigs(self, configs): - deferreds = [] - - def errback(failure): - self.log.critical( - "Unable to update configs for service instance %s: %s", - self.__class__.__name__, - failure, - ) - - for listener in self.listeners: - options = self.listenerOptions.get(listener, None) - deviceFilter = self._getOptionsFilter(options) - filteredConfigs = filter(deviceFilter, configs) - args = Zipper.dump(filteredConfigs) - d = listener.callRemote("updateDeviceConfigs", args).addErrback( - errback - ) - deferreds.append(d) - return deferreds - - # FIXME: Don't use _getNotifiableClasses, use @onUpdate(myclasses) - def _getNotifiableClasses(self): - """ - a tuple of classes. 
When any object of a type in the sequence is - modified the collector connected to the service will be notified to - update its configuration - - @rtype: tuple - """ - return () - - def _pushReconfigure(self, value): - """ - notify the collector to reread the entire configuration - """ - # value is unused but needed for the procrastinator framework - for listener in self.listeners: - listener.callRemote("notifyConfigChanged") - self._reconfigProcrastinator.clear() - - def _reconfigureIfNotify(self, object): - ncc = self._notifyConfigChange(object) - self.log.debug( - "services/config.py _reconfigureIfNotify object=%r " - "_notifyConfigChange=%s", - object, - ncc, - ) - if ncc: - self.log.debug("scheduling collector reconfigure") - self._reconfigProcrastinator.doLater(True) - - def _notifyConfigChange(self, object): - """ - Called when an object of a type from _getNotifiableClasses is - encountered - @return: should a notify config changed be sent - @rtype: boolean - """ - return True - -class _HasTemplate(object): - """Predicate class that checks whether a given device has a template - matching the given template. - """ - - def __init__(self, template, log): - self.template = template - self.log = log - - def __call__(self, device): - if issubclass(self.template.getTargetPythonClass(), Device): - if self.template in device.getRRDTemplates(): - self.log.debug( - "%s bound to template %s", - device.getPrimaryId(), - self.template.getPrimaryId(), - ) - return True - else: - self.log.debug( - "%s not bound to template %s", - device.getPrimaryId(), - self.template.getPrimaryId(), - ) - return False - else: - # check components, Too expensive? - for comp in device.getMonitoredComponents( - type=self.template.getTargetPythonClass().meta_type - ): - if self.template in comp.getRRDTemplates(): - self.log.debug( - "%s bound to template %s", - comp.getPrimaryId(), - self.template.getPrimaryId(), - ) - return True - else: - self.log.debug( - "%s not bound to template %s", - comp.getPrimaryId(), - self.template.getPrimaryId(), - ) - return False +def _getDevicesByName(ctx, names): + # Returns a generator that produces Device objects. + return ( + device + for device in (ctx.findDeviceByIdExact(name) for name in names) + if device is not None + ) class NullConfigService(CollectorConfigService): diff --git a/Products/ZenCollector/services/error.py b/Products/ZenCollector/services/error.py new file mode 100644 index 0000000000..b7590fb577 --- /dev/null +++ b/Products/ZenCollector/services/error.py @@ -0,0 +1,46 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import traceback + +from Products.ZenEvents.ZenEventClasses import Critical + + +def trapException(service, functor, *args, **kwargs): + """ + Call the functor using the arguments and trap unhandled exceptions. + + :parameter functor: function to call. + :type functor: Callable[Any, Any] + :parameter args: positional arguments to functor. + :type args: Sequence[Any] + :parameter kwargs: keyword arguments to functor. + :type kwargs: Map[Any, Any] + :returns: result of calling functor(*args, **kwargs) + or None if functor raises an exception. 
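For illustration, roughly how a service is expected to use trapException (mirroring the calls in CollectorConfigService above); FakeService and build_proxies are stand-ins, not part of this change:

import logging

from Products.ZenCollector.services.error import trapException


class FakeService(object):
    """Stand-in for a HubService instance, for illustration only."""

    instance = "localhost"
    log = logging.getLogger("zen.example")

    def sendEvent(self, event):
        self.last_event = event


def build_proxies(device):
    raise RuntimeError("boom")      # simulate an unexpected failure


service = FakeService()
result = trapException(service, build_proxies, "device1")
assert result is None                   # the exception was trapped
assert service.last_event["severity"]   # and a Critical event was sent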
+ :rtype: Any + """ + try: + return functor(*args, **kwargs) + except Exception as ex: + msg = "Unhandled exception in zenhub service %s: %s" % ( + service.__class__, + ex, + ) + service.log.exception(msg) + service.sendEvent( + { + "severity": Critical, + "component": str(service.__class__), + "traceback": traceback.format_exc(), + "summary": msg, + "device": service.instance, + "methodCall": "%s(%s, %s)" % (functor.__name__, args, kwargs), + } + ) diff --git a/Products/ZenCollector/services/optionsfilter.py b/Products/ZenCollector/services/optionsfilter.py new file mode 100644 index 0000000000..2593773c29 --- /dev/null +++ b/Products/ZenCollector/services/optionsfilter.py @@ -0,0 +1,30 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from zope.component import getUtilitiesFor +from ..interfaces import IConfigurationDispatchingFilter + + +def getOptionsFilter(options): + if options: + name = options.get("configDispatch", "") if options else "" + factories = dict(getUtilitiesFor(IConfigurationDispatchingFilter)) + factory = factories.get(name) + if factory is None: + factory = factories.get("") + if factory is not None: + devicefilter = factory.getFilter(options) + if devicefilter: + return devicefilter + + return _alwaysTrue + + +def _alwaysTrue(*args): + return True diff --git a/Products/ZenCollector/services/push.py b/Products/ZenCollector/services/push.py new file mode 100644 index 0000000000..db03912c63 --- /dev/null +++ b/Products/ZenCollector/services/push.py @@ -0,0 +1,40 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from Products.ZenHub.zodb import onUpdate +from Products.ZenModel.PerformanceConf import PerformanceConf +from Products.ZenModel.ZenPack import ZenPack +from Products.ZenUtils.AutoGCObjectReader import gc_cache_every + + +class UpdateCollectorMixin(object): + """Push data back to collection daemons.""" + + @onUpdate(PerformanceConf) + def perfConfUpdated(self, conf, event): + with gc_cache_every(1000, db=self.dmd._p_jar._db): + if conf.id == self.instance: + for listener in self.listeners: + listener.callRemote( + "setPropertyItems", conf.propertyItems() + ) + + @onUpdate(ZenPack) + def zenPackUpdated(self, zenpack, event): + with gc_cache_every(1000, db=self.dmd._p_jar._db): + for listener in self.listeners: + try: + listener.callRemote( + "updateThresholdClasses", + self.remote_getThresholdClasses(), + ) + except Exception: + self.log.warning( + "Error notifying a listener of new threshold classes" + ) diff --git a/Products/ZenCollector/statistics.py b/Products/ZenCollector/statistics.py new file mode 100644 index 0000000000..89edbd794e --- /dev/null +++ b/Products/ZenCollector/statistics.py @@ -0,0 +1,72 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
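A quick sketch of getOptionsFilter's fallback behaviour: with no options, or with no IConfigurationDispatchingFilter utility registered under the requested name or the default empty name, every device passes (the option value below is illustrative and assumes no such utility is registered):

from Products.ZenCollector.services.optionsfilter import getOptionsFilter

# No options at all -> the permissive _alwaysTrue predicate is returned.
predicate = getOptionsFilter(None)
assert predicate(object()) is True

# With options, the utility named by options["configDispatch"] is tried
# first, then the unnamed ("") utility; if neither produces a filter the
# same permissive predicate is used.
predicate = getOptionsFilter({"configDispatch": "not-registered"})
assert predicate(object()) is True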
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from zope.component.factory import Factory +from zope.interface import implementer + +from .interfaces import IStatistic, IStatisticsService + + +@implementer(IStatistic) +class Statistic(object): + def __init__(self, name, type, **kwargs): + self.value = 0 + self.name = name + self.type = type + self.kwargs = kwargs + + +@implementer(IStatisticsService) +class StatisticsService(object): + def __init__(self): + self._stats = {} + + def addStatistic(self, name, type, **kwargs): + if name in self._stats: + raise NameError("Statistic %s already exists" % name) + + if type not in ("DERIVE", "COUNTER", "GAUGE"): + raise TypeError("Statistic type %s not supported" % type) + + stat = Statistic(name, type, **kwargs) + self._stats[name] = stat + + def getStatistic(self, name): + return self._stats[name] + + def postStatistics(self, rrdStats): + for stat in self._stats.values(): + # figure out which function to use to post this statistical data + try: + func = { + "COUNTER": rrdStats.counter, + "GAUGE": rrdStats.gauge, + "DERIVE": rrdStats.derive, + }[stat.type] + except KeyError: + raise TypeError("Statistic type %s not supported" % stat.type) + + # These should always come back empty now because DaemonStats + # posts the events for us + func(stat.name, stat.value, **stat.kwargs) + + # counter is an ever-increasing value, but otherwise... + if stat.type != "COUNTER": + stat.value = 0 + + +class StatisticsServiceFactory(Factory): + """Factory for StatisticsService objects.""" + + def __init__(self): + super(StatisticsServiceFactory, self).__init__( + StatisticsService, + "StatisticsService", + "Creates a StatisticsService instance", + ) diff --git a/Products/ZenCollector/tasks.py b/Products/ZenCollector/tasks.py index 49ae3658b0..89695174cd 100644 --- a/Products/ZenCollector/tasks.py +++ b/Products/ZenCollector/tasks.py @@ -7,29 +7,22 @@ # ############################################################################## +from __future__ import division + import logging import random from copy import copy +import six + import zope.component import zope.interface -from twisted.internet import defer - -from Products.ZenEvents.ZenEventClasses import Cmd_Fail, Error from Products.ZenUtils.observable import ObservableMixin from Products.ZenUtils.Utils import readable_time -from .interfaces import ( - ICollector, - ICollectorWorker, - IScheduledTask, - IScheduledTaskFactory, - ISubTaskSplitter, - ITaskSplitter, - IWorkerExecutor, -) +from .interfaces import IScheduledTaskFactory, ISubTaskSplitter, ITaskSplitter log = logging.getLogger("zen.collector.tasks") @@ -71,7 +64,7 @@ def _delayNextCheck(self): """ # If it's not responding, don't poll it so often if self.interval != self._maxbackoffseconds: - delay = random.randint(int(self.interval / 2), self.interval) * 2 + delay = _randomize_delay(self.interval) self.interval = min(self._maxbackoffseconds, self.interval + delay) log.debug( "Delaying next check for another %s", @@ -96,7 +89,11 @@ def chunk(self, lst, n): """ Break lst into n-sized chunks """ - return [lst[i : i + n] for i in xrange(0, len(lst), n)] + return [lst[i : i + n] for i in six.moves.range(0, len(lst), n)] + + +def _randomize_delay(interval): + return random.randint(interval // 2, interval) * 2 # noqa: S311 @zope.interface.implementer(ITaskSplitter) @@ -232,229 
+229,6 @@ def reset(self): self.config = None -class RRDWriter(object): - def __init__(self, delegate): - self.delegate = delegate - - def writeRRD(self, counter, countervalue, countertype, **kwargs): - """ - write given data to RRD streaming files - """ - self.delegate.writeRRD(counter, countervalue, countertype, **kwargs) - - -class EventSender(object): - def __init__(self, delegate): - self.delegate = delegate - - def sendEvent(self, event, **eventData): - evt = event.copy() - evt.update(eventData) - self.delegate.sendEvent(evt) - - -class WorkerOutputProxy(object): - def __init__(self, daemon=None, rrdWriter=None, eventSender=None): - self.daemon = daemon - self.rrdWriter = rrdWriter if not daemon else RRDWriter(daemon) - self.eventSender = eventSender if not daemon else EventSender(daemon) - - @defer.inlineCallbacks - def sendOutput(self, data, events, intervalSeconds): - if self.rrdWriter: - for d in data: - yield self.rrdWriter.writeRRD( - d["path"], - d["value"], - d["rrdType"], - rrdCommand=d["rrdCommand"], - cycleTime=intervalSeconds, - min=d["min"], - max=d["max"], - ) - - if self.eventSender: - for ev in events: - self.sendEvent(ev) - - @defer.inlineCallbacks - def sendEvent(self, event): - yield self.eventSender.sendEvent(event) - - -@zope.interface.implementer(IScheduledTask) -class SingleWorkerTask(ObservableMixin): - def __init__( - self, deviceId, taskName, scheduleIntervalSeconds, taskConfig - ): - """ - Construct a new task instance to fetch data from the configured - worker object. - - @param deviceId: the Zenoss deviceId to watch - @type deviceId: string - @param taskName: the unique identifier for this task - @type taskName: string - @param scheduleIntervalSeconds: the interval at which this task will be - collected - @type scheduleIntervalSeconds: int - @param taskConfig: the configuration for this task - """ - super(SingleWorkerTask, self).__init__() - - self.name = taskName - self.configId = deviceId - self.interval = scheduleIntervalSeconds - self.state = TaskStates.STATE_IDLE - - self._taskConfig = taskConfig - self._devId = deviceId - self._manageIp = self._taskConfig.manageIp - self._worker = None - - self.daemon = zope.component.getUtility(ICollector) - self.outputProxy = WorkerOutputProxy(self.daemon) - self.component = self.daemon.preferences.collectorName - - options = self.daemon.options - taskOptionDict = dict( - (attr, value) - for (attr, value) in options.__dict__.items() - if value is not None - and not attr.startswith("_") - and not callable(value) - ) - self._taskConfig.options = taskOptionDict - - @property - def deviceId(self): - return self._devId - - @property - def worker(self): - """ - Instance of the worker class to use for all tasks - """ - return self._worker - - @worker.setter - def worker(self, value): - self._worker = value - - @defer.inlineCallbacks - def cleanup(self): - """ - Delegate cleanup directly to the worker object - """ - try: - self.state = TaskStates.STATE_CLEANING - if self.worker: - yield self.worker.stop() - finally: - self.state = TaskStates.STATE_COMPLETED - - @defer.inlineCallbacks - def doTask(self): - """ - Delegate collection directly work to the worker object - """ - results = None - try: - self.state = TaskStates.STATE_RUNNING - if self.worker: - # perform data collection in the worker object - results = yield self.worker.collect( - self._devId, self._taskConfig - ) - - except Exception as ex: - log.error( - "worker collection: results (exception) = %r (%s)", results, ex - ) - collectionErrorEvent = { - "device": 
self.deviceId, - "severity": Error, - "eventClass": Cmd_Fail, - "summary": "Exception collecting:" + str(ex), - "component": self.component, - "agent": self.component, - } - yield self.outputProxy.sendEvent(collectionErrorEvent) - - else: - if results: - # send the data through the output proxy - data, events = results - if "testcounter" in self._taskConfig.options: - testCounter = self._taskConfig.options["testcounter"] - for dp in data: - if dp["counter"] == testCounter: - log.info( - "Collected value for %s: %s (%s)", - dp["counter"], - dp["value"], - dp["path"], - ) - break - else: - log.info( - "No value collected for %s from device %s", - testCounter, - self._devId, - ) - log.debug( - "Valid counters: %s", - [dp["counter"] for dp in data], - ) - - yield self.outputProxy.sendOutput(data, events, self.interval) - - finally: - self.state = TaskStates.STATE_IDLE - - -@zope.interface.implementer(IScheduledTaskFactory) -class SingleWorkerTaskFactory(SimpleTaskFactory): - """ - A task factory that creates a scheduled task using the provided - task class and the minimum attributes needed for a task, plus redirects - the 'doTask' and 'cleanup' methods to a single ICollectorWorker instance. - """ - - def __init__(self, taskClass=SingleWorkerTask, iCollectorWorker=None): - super(SingleWorkerTaskFactory, self).__init__(taskClass) - self.workerClass = iCollectorWorker - - def setWorkerClass(self, iCollectorWorker): - self.workerClass = iCollectorWorker - - def postInitialization(self): - pass - - def build(self): - task = super(SingleWorkerTaskFactory, self).build() - if self.workerClass and ICollectorWorker.implementedBy( - self.workerClass - ): - worker = self.workerClass() - worker.prepareToRun() - task.worker = worker - return task - - -@zope.interface.implementer(IWorkerExecutor) -class NullWorkerExecutor(object): - """ - IWorkerExecutor that does nothing with the provided worker - """ - - def setWorkerClass(self, workerClass): - pass - - def run(self): - pass - - class TaskStates(object): STATE_IDLE = "IDLE" STATE_CONNECTING = "CONNECTING" diff --git a/Products/ZenCollector/tests/testConfig.py b/Products/ZenCollector/tests/testConfig.py index 811364f468..d0bbfab5bb 100644 --- a/Products/ZenCollector/tests/testConfig.py +++ b/Products/ZenCollector/tests/testConfig.py @@ -13,11 +13,11 @@ from cryptography.fernet import Fernet from twisted.internet import defer -from Products.ZenCollector.config import ConfigurationProxy -from Products.ZenCollector.interfaces import ICollector, ICollectorPreferences - from Products.ZenTestCase.BaseTestCase import BaseTestCase +from ..config import ConfigurationProxy +from ..interfaces import ICollector, ICollectorPreferences + @zope.interface.implementer(ICollector) class MyCollector(object): @@ -31,9 +31,6 @@ def remote_getThresholdClasses(self): def remote_getCollectorThresholds(self): return defer.succeed(["yabba dabba do", "ho ho hum"]) - def remote_getDeviceConfigs(self, devices=[]): - return defer.succeed(["hmm", "foo", "bar"]) - def remote_getEncryptionKey(self): return defer.succeed(Fernet.generate_key()) @@ -44,11 +41,20 @@ def callRemote(self, methodName, *args, **kwargs): return self.remote_getThresholdClasses() elif methodName == "getCollectorThresholds": return self.remote_getCollectorThresholds() - elif methodName == "getDeviceConfigs": - return self.remote_getDeviceConfigs(args) elif methodName == "getEncryptionKey": return self.remote_getEncryptionKey() + class MyConfigCacheProxy(object): + def remote_getDeviceConfigs(self, devices=[]): + 
return defer.succeed(["hmm", "foo", "bar"]) + + def callRemote(self, methodName, *args, **kwargs): + if methodName == "getDeviceConfigs": + return self.remote_getDeviceConfigs(args) + + def getRemoteConfigCacheProxy(self): + return MyCollector.MyConfigCacheProxy() + def getRemoteConfigServiceProxy(self): return MyCollector.MyConfigServiceProxy() @@ -64,8 +70,10 @@ class Dummy(object): class MyPrefs(object): def __init__(self): self.collectorName = "testcollector" + self.configurationService = "MyConfigService" self.options = Dummy() self.options.monitor = "localhost" + self.options.workerid = 0 class TestConfig(BaseTestCase): @@ -78,9 +86,9 @@ def validate(result): self.assertEquals(result["foobar"], "abcxyz") return result - cfgService = ConfigurationProxy() prefs = MyPrefs() - d = cfgService.getPropertyItems(prefs) + cfgService = ConfigurationProxy(prefs) + d = cfgService.getPropertyItems() d.addBoth(validate) return d @@ -89,10 +97,10 @@ def validate(result): self.assertTrue("Products.ZenModel.FooBarThreshold" in result) return result - cfgService = ConfigurationProxy() prefs = MyPrefs() + cfgService = ConfigurationProxy(prefs) - d = cfgService.getThresholdClasses(prefs) + d = cfgService.getThresholdClasses() d.addBoth(validate) return d @@ -102,10 +110,10 @@ def validate(result): self.assertTrue("ho ho hum" in result) return result - cfgService = ConfigurationProxy() prefs = MyPrefs() + cfgService = ConfigurationProxy(prefs) - d = cfgService.getThresholds(prefs) + d = cfgService.getThresholds() d.addBoth(validate) return d @@ -115,15 +123,17 @@ def validate(result): self.assertFalse("abcdef" in result) return result - cfgService = ConfigurationProxy() prefs = MyPrefs() + cfgService = ConfigurationProxy(prefs) + token = 10 + deviceIds = [] - d = cfgService.getConfigProxies(prefs) + d = cfgService.getConfigProxies(token, deviceIds) d.addBoth(validate) - return d def testCrypt(self): - cfgService = ConfigurationProxy() + prefs = MyPrefs() + cfgService = ConfigurationProxy(prefs) s = "this is a string I wish to encrypt" diff --git a/Products/ZenCollector/tests/testFactory.py b/Products/ZenCollector/tests/testFactory.py index 4f9e6816be..d37d7fcfa6 100644 --- a/Products/ZenCollector/tests/testFactory.py +++ b/Products/ZenCollector/tests/testFactory.py @@ -7,11 +7,12 @@ # ############################################################################## -from Products.ZenCollector import CoreCollectorFrameworkFactory -from Products.ZenCollector.config import ConfigurationProxy -from Products.ZenCollector.scheduler import Scheduler from Products.ZenTestCase.BaseTestCase import BaseTestCase +from ..frameworkfactory import CoreCollectorFrameworkFactory +from ..config import ConfigurationProxy +from ..scheduler import TaskScheduler + class TestFactory(BaseTestCase): def testFactoryInstall(self): @@ -24,7 +25,7 @@ def testFactoryInstall(self): self.assertTrue(isinstance(configProxy, ConfigurationProxy)) scheduler = factory.getScheduler() - self.assertTrue(isinstance(scheduler, Scheduler)) + self.assertTrue(isinstance(scheduler, TaskScheduler)) def test_suite(): diff --git a/Products/ZenCollector/tests/test_cyberark.py b/Products/ZenCollector/tests/test_cyberark.py index e0d275d1a6..f198aa716b 100644 --- a/Products/ZenCollector/tests/test_cyberark.py +++ b/Products/ZenCollector/tests/test_cyberark.py @@ -29,19 +29,18 @@ CyberArkProperty, ) -PATH = {'src': 'Products.ZenCollector.cyberark'} +PATH = {"src": "Products.ZenCollector.cyberark"} class TestFunctions(TestCase): - def setUp(t): t.log_patcher = 
patch("{src}.log".format(**PATH), autospec=True) t.log = t.log_patcher.start() t.addCleanup(t.log_patcher.stop) t.getGlobalConfiguration_patcher = patch( - '{src}.getGlobalConfiguration'.format(**PATH), - name='getGlobalConfiguration', + "{src}.getGlobalConfiguration".format(**PATH), + name="getGlobalConfiguration", autospec=True, ) t.getGlobalConfiguration = t.getGlobalConfiguration_patcher.start() @@ -98,7 +97,6 @@ def test_get_cyberark_success(t, _manager): class TestCyberArk(TestCase): - class Conf(object): def __init__(self, query): self.configId = "dev1" @@ -111,7 +109,8 @@ def setUp(t): t.addCleanup(t.log_patcher.stop) t.queryUtility_patcher = patch( - "{src}.queryUtility".format(**PATH), autospec=True, + "{src}.queryUtility".format(**PATH), + autospec=True, ) t.queryUtility = t.queryUtility_patcher.start() t.addCleanup(t.queryUtility_patcher.stop) @@ -190,14 +189,14 @@ def test_update_config_no_props(t): class TestCyberArkManager(TestCase): - def setUp(t): t.log_patcher = patch("{src}.log".format(**PATH), autospec=True) t.log = t.log_patcher.start() t.addCleanup(t.log_patcher.stop) t.queryUtility_patcher = patch( - "{src}.queryUtility".format(**PATH), autospec=True, + "{src}.queryUtility".format(**PATH), + autospec=True, ) t.queryUtility = t.queryUtility_patcher.start() t.addCleanup(t.queryUtility_patcher.stop) @@ -277,9 +276,13 @@ def test_update_error(t): "Bad CyberArk query " "status=%s %s device=%s zproperty=%s query=%s " "ErrorCode=%s ErrorMsg=%s", - status, httplib.responses.get(status), - devId, zprop, query, - "4E", "object not found", + status, + httplib.responses.get(status), + devId, + zprop, + query, + "4E", + "object not found", ) mgr.add(devId, zprop, query) @@ -306,8 +309,11 @@ def test_update_unexpected_error(t): "Bad CyberArk query " "status=%s %s device=%s zproperty=%s query=%s " "result=%s", - status, httplib.responses.get(status), - devId, zprop, query, + status, + httplib.responses.get(status), + devId, + zprop, + query, "Unexpected format", ) @@ -333,7 +339,10 @@ def test_update_failure(t): expected = call( "Failed to execute CyberArk query - %s " "device=%s zproperty=%s query=%s", - ex, devId, zprop, query, + ex, + devId, + zprop, + query, ) mgr.add(devId, zprop, query) @@ -346,26 +355,28 @@ def test_update_failure(t): class TestCyberArkClient(TestCase): - def setUp(t): t.log_patcher = patch("{src}.log".format(**PATH), autospec=True) t.log = t.log_patcher.start() t.addCleanup(t.log_patcher.stop) t.load_certificates_patcher = patch( - "{src}.load_certificates".format(**PATH), autospec=True, + "{src}.load_certificates".format(**PATH), + autospec=True, ) t.load_certificates = t.load_certificates_patcher.start() t.addCleanup(t.load_certificates_patcher.stop) t.agent_patcher = patch( - "{src}.client.Agent".format(**PATH), autospec=True, + "{src}.client.Agent".format(**PATH), + autospec=True, ) t.agent = t.agent_patcher.start() t.addCleanup(t.agent_patcher.stop) t.readBody_patcher = patch( - "{src}.client.readBody".format(**PATH), autospec=True, + "{src}.client.readBody".format(**PATH), + autospec=True, ) t.readBody = t.readBody_patcher.start() t.addCleanup(t.readBody_patcher.stop) @@ -419,10 +430,7 @@ def test_request(t): t.assertEqual(result, expected_result) t.assertEqual(code, expected_code) ag.request.assert_called_once_with( - "GET", - "https://vault/bar/baz?appid=foo&object=foo", - None, - None + "GET", "https://vault/bar/baz?appid=foo&object=foo", None, None ) def test_request_with_extra_path(t): @@ -439,7 +447,7 @@ def test_request_with_extra_path(t): 
"GET", "https://vault/alias/bar/baz?appid=foo&object=foo", None, - None + None, ) def test_request_failure(t): @@ -474,7 +482,6 @@ def go_boom(*args, **kw): class TestCyberArkProperty(TestCase): - def test_init(t): dev = "device1" zprop = "prop1" @@ -488,67 +495,136 @@ def test_init(t): t.assertIsNone(prop.value) +# openssl genrsa -aes256 -passout pass:qwerty -out ca.pass.key 4096 +# openssl rsa -passin pass:qwerty -in ca.pass.key -out ca.key +# openssl req -new -x509 -days 3650 -key ca.key -out ca.crt rootCA_crt = """ -----BEGIN CERTIFICATE----- -MIIC1jCCAj+gAwIBAgIUDmS1sDHq5ZxY2xMz3OVPbT/LjfgwDQYJKoZIhvcNAQEL -BQAwfTELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVRleGFzMQ8wDQYDVQQHDAZBdXN0 -aW4xDzANBgNVBAoMBlplbm9zczEMMAoGA1UECwwDRGV2MQ8wDQYDVQQDDAZaZW5v -c3MxHTAbBgkqhkiG9w0BCQEWDmRldkB6ZW5vc3MuY29tMB4XDTIyMDEyNjE3MDc0 -NFoXDTI2MTIzMTE3MDc0NFowfTELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVRleGFz -MQ8wDQYDVQQHDAZBdXN0aW4xDzANBgNVBAoMBlplbm9zczEMMAoGA1UECwwDRGV2 -MQ8wDQYDVQQDDAZaZW5vc3MxHTAbBgkqhkiG9w0BCQEWDmRldkB6ZW5vc3MuY29t -MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQD5NUfsKhsfkDOQfiuJCzdk3GHD -A6J2ISD0cCRyhfqLWbu6Gz6yjmLMSrwqzp9xSPqbHTo3uC916aRdOREnOLeeNgMD -eHTQKbtEooNMXaeU0WwTHbWmsT6XI8tifAiMFsALsuZtXrObr1NFWPMSxOdrqnjg -FycFdbZB6Rvys1hiaQIDAQABo1MwUTAdBgNVHQ4EFgQU3RLbuadNNemGXzwMtv+P -+PytrgswHwYDVR0jBBgwFoAU3RLbuadNNemGXzwMtv+P+PytrgswDwYDVR0TAQH/ -BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQBXRCxYTdityAm0zK+MvpETpWZxNOdV -ZBFIohave+TAnpTyb8YpC1fCK/8dY4Q53yL/MNW9XosKI+5eQa+8X/FNEXv1TwNs -gHbYHHO7onDPDzkQoXBC0K65m8fSTsdbxazjG2UddyfWkI9wjESkE6yZjgtN52T3 -90Q7rR7mG9d9cA== +MIIFazCCA1OgAwIBAgIUbQjJ7ZePquLJ74Wi+WFWJTqChM8wDQYJKoZIhvcNAQEL +BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMzA4MzAwNzAzNTdaFw0zMzA4 +MjcwNzAzNTdaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw +HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCZlHDhq9m+eBuOgMAqREL3HQk0KJzJatYFSlj1zuFF +zJwDd6Q4LRxEu89PVf3l8xa09WY7Wa1t0NXXqndIzvh5gpha9uJ0I2hiq4IHBjql +8vEMLYeYWVdr9yePeOPF/OHnVRW3YWkj+G+cHgYweMMbWuLoxZHXyM+md5t2SmvH +4YrcgdcPD7Actl9GqIq4AMvqtu+X3W9jDyX6+S5TCgtcKHaBq0r3vSZ4BOn+zlgA +DejIjMyp8ws75vGrrP6aiP++Am4lVHnXV0EB3d1rx/WAH3Kf36uDwuD2+KRNwTVW +KJWkCUHUhp6GyZQT/OkuObROaar1DH3lPale0ka45JJlngFZQxXeHdpG4CSRD8pQ +j3WRGg1bmHx47m9lOaTqtmktjRXzGYNG/0eDwOQEs013unxBUw11gzL44W/AWqC8 +Hp3qZp2ZyzSLl+yrkKHcmgj3PpAcWtm/Vu0rMddjtkIIcXXf6nLOkpmDC+S6xIbc +Ksgd6ewy2tyxE5s3eNgKqPj4LJK0ANpDan/pVRpdQb0T5UUeNKCl4EeoZpwsHly6 +inltPqqZjwKOxqO037uBbc3gc/qacHBfb8yThm98PPR7A2C4BOwxNMvQ5e2Ey9+o +/w6qJNHOfX6W7YZhuf4OXBBMWj5LKoO/uVImg1fRATpGCwBK4kNqtVszWx6zy4bb +pwIDAQABo1MwUTAdBgNVHQ4EFgQU9lu+gYAb++EtfoAP3djV++c/SrYwHwYDVR0j +BBgwFoAU9lu+gYAb++EtfoAP3djV++c/SrYwDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQsFAAOCAgEAKLtUhk7tP0f9fjzc8qnseG+QsmdObJuMd/x7m/h7GOjn +0VqOkE8hRJJXVIIDv8ZssK3d+MNhHKIHuH3bRfFtqopXXOxnLR5FvI4Z9t88po8V +75IreWyqBW2u3fCNzYTgxkqKR2aOc4TN6qTRtAS17BJybbpT8GMu8lQ3ubSAjVY7 +CWh3RxXalUw9vGQ9LIzeyASiOWRDXeeEIeuNcwPzXGjssGPQGjbR2Cbes88A3Sf5 +By8da/dlZMxQOtlryOgaLKmXg/P3x6DzCmhS2tWfBMQ23ifuegYylFPecqpJw8L7 +Atz8TmULt2raWk+rrzcBwcBnx+t5WtFT7SrhEOiBtA5BwWNprLi9XyFqXQBsMpog +t/vSlCT8MmnleCmaXvHk/+xqasHDxaowSibOjJHxrkQzhkSC7atfKc9Qqw8kABAL +ZlaSBRGFs9MIGCIO0crMzYkHlxH9BuORnJDKGYRFzPVgov/QlnrZWoz67G7foj6U +Dt/HXx+taPY1WXjl5f+njgVXnQaEiH6kSfc3GP7zHVW8G/KYXGLdoKjYIn3iOFYr +mZtK7sQdO4g/RVH9arKj6JHlPo6l7b/RybamY0pny4ptiVPv0qq2cgxXMj8s7XeO +Tj65afHGguoEE61o/QbH5I+KDgcCOrIHq7vyjBfH/kQzViqTIBSpajVZ46V99Xc= -----END CERTIFICATE----- """ +# openssl req -new -key server.key -out 
server.csr +# openssl x509 -CAcreateserial -req -days 3650 -in server.csr -CA ca.crt -CAkey ca.key -out server.crt client_crt = """ -----BEGIN CERTIFICATE----- -MIICfDCCAeUCFCIyzicXHM920mzh6McYBfIKAmeQMA0GCSqGSIb3DQEBCwUAMH0x -CzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEPMA0GA1UEBwwGQXVzdGluMQ8w -DQYDVQQKDAZaZW5vc3MxDDAKBgNVBAsMA0RldjEPMA0GA1UEAwwGWmVub3NzMR0w -GwYJKoZIhvcNAQkBFg5kZXZAemVub3NzLmNvbTAeFw0yMjAxMjYxNzEwMDBaFw0y -NjEyMzExNzEwMDBaMH0xCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEPMA0G -A1UEBwwGQXVzdGluMQ8wDQYDVQQKDAZaZW5vc3MxDDAKBgNVBAsMA0RldjEPMA0G -A1UEAwwGWmVub3NzMR0wGwYJKoZIhvcNAQkBFg5kZXZAemVub3NzLmNvbTCBnzAN -BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAwYaLu8f8Hd9yTqGCfFXb1P60LEzlGUom -mStO06zfk3FFz9MBEbVHX53+92R/xhKVfUiRa967COM4y6XJHnfPD/sFBCir4z4+ -ApLRV8jEsWYP/sDG59nZDZm+IUqOwqWfYlvJWpbOlFC5s1q4xeECemM88c9poKAZ -AW3H9oM/pR0CAwEAATANBgkqhkiG9w0BAQsFAAOBgQDDH+LvhUfdLTGF2L/KwHxw -KdWs1KEoFUqI2kD9nUVDj0WoX6pSE8/txRS3Pw2PsA2KahAPTAOZJcLVy5rbUCvF -+DgiPegUZ/btgGrrT5NfTPtkb1E8wNsz+XOEwzlzNakA08Lec6q/vBewJVm2duMd -bqCsKPJj+yBv0nMqFWgVmQ== +MIIFETCCAvkCFHHU+QLzVaIAlzUoRGeFjk9PtLsRMA0GCSqGSIb3DQEBCwUAMEUx +CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl +cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjMwODMwMDcwNzIyWhcNMzMwODI3MDcw +NzIyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE +CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOC +Ag8AMIICCgKCAgEApZbM2FwREXdRztX/WqIwr3fK1BA5juT/t3aNuT5BVgYtPrJW +7FqoYTUVAcbK662A/yMMpNYTP5lejpEhwuFQXDi3rhNAI6oamhIKfEaZcEvj1VwS +O2JF+FNWl6t0y+gwd6RQTGJcpd5i2aszpc7BTBYjRxG1RQch4Uue5/fx9eD2XgxS +YgbLq6ODa1KVdm2yoWT+Tp8soaWPNFfeTDQrrQcsb5IT+oCclwsMK49z4hK2uxko +Pg7NsvrpLUNNlQCiFoiGdlswdfEumGolKDkS893vLF2pIGhykCGjKbVl+xVAQ6ch +TlkuOH2q91CDnojZwd6pkkPCiSG0v/crIROrGAu8mhKQkkzQ7IMixNLiBEdk9tum +2fCeqVhz/mGp6W5xlKXEIdoLPxtFRHpOI96PGGyHo5GhAIlrqqzUmuRSDnr4Iiyo +vwlX1pmVl1y4WK25oxhExf2qMAOSHH0eA8RhqIulKhbQCYnfwxbgSIBFyWGJR4bD +pyIpPum99RzUUQ5ez848cvK7LiNASeCHoBAeutG0YpXSBTHL+ql/UVLQ7e1yvdV/ +sDi1rnztLBPwUK6+To7Kn67eErdbi/50q/rXryF6YGbPsBZ5xVMmX9nleLkqSTSp +PPPWniHm6GBcZPWmAUWauBCa7XmJmdREoRZ1afA+efvpTgTm7/S4+GVvs+0CAwEA +ATANBgkqhkiG9w0BAQsFAAOCAgEAg2XMP4PAM96soOXNKFeihT4ccQvzzVOtMBV4 +Cc2OP1Ak5BUGRHRfd9AbjYLyRr5L9EyKJmckwZZtFEcRtZLAabUGYiu2L76wwgw3 +uZ3rtkt860N8bj7id9+P8139sIuKnBfnGf7YBp3fbH0GarWFQYyRBehCdsy6FS6o +gV9jmO3NYfXYHX6cR0uYKW9Bv7+xZrJf0+ZHtjwxDNGy3WRyVnTxDChbRFgN5irR +chiRkuOLI4sDa8dc0xrD9WiZ28+PeIiwYfjjHZqresSkOTPB0mzqYMLEDTClrf+M +Q8vW41k0rRvcudousTyrKuMfHxmdC8Ei1OuXjhVQ3nvJnRfWlAqlfSLAcfab0mNb +nptMwzyKDNWJB4aNUDT5RZ4Wbt2Khy9ol9F8STINQ6B71l8/1ORQrZol3QMR/hdu +SCEGu9rpKI+Yti+s0C6IDXU6qDP0kDAzITGrjeBkYSoIj7Tk6L4g1oaZQtQD9Pb8 +xF4SAAt+tc7/U7mvLolHTHpvZzxclHLoyyyRDJ2PkMNhcwQcYag/GCm31K42dKU/ +Zcsd5ZhlNhxmzipuJjzhqrICXCt+WGEiz0/KlxtHhJYEBrjimPoc6Mhm7Byo+4To +1HHU0t80+sRir1j+waNepTsdEdaNVKVweBu0lem2CU2IMzjkSQRu4UOssvJNTw0Y +hk+hfo0= -----END CERTIFICATE----- """ +# openssl genrsa -aes256 -passout pass:ytrewq -out server.pass.key 4096 +# openssl rsa -passin pass:ytrewq -in server.pass.key -out server.key client_pem = """ -----BEGIN RSA PRIVATE KEY----- -MIICWwIBAAKBgQDBhou7x/wd33JOoYJ8VdvU/rQsTOUZSiaZK07TrN+TcUXP0wER -tUdfnf73ZH/GEpV9SJFr3rsI4zjLpcked88P+wUEKKvjPj4CktFXyMSxZg/+wMbn -2dkNmb4hSo7CpZ9iW8lals6UULmzWrjF4QJ6Yzzxz2mgoBkBbcf2gz+lHQIDAQAB -AoGANo+lY7rdVNrDknGspTtbsDBjQb4oNToXqcVxAvLRUfN0mERIH+L5DXcxBDS8 -ZW6l4N2NyljQaJAPWjMSgdmLcdrhzABsicKQ1/gkjsfNK8Rz0IzlfR/MuljrFC6s -ZUeWuBsd5wp8/RrFXZVcNypV7mvJ/iJGZnoZqrAwn5bNS20CQQD06C8inRZRjhZC -wtvl/XQaiPsgoez8J8VU2lHMhIvFiJWp4dztoPsBi74MQcF/TgoItn2AmOtZOtph 
-3H7y1GMLAkEAykqRvYv6eoNbmMZFvDvyVJu2rbVAn0qWJz988oLrmduWWAMy5L2Z -pShUAnkBRT7FbXWYAjeSUEX8PITflQGxdwJAe6ic3CpbMZS/0rfXFprSO++8dW6t -XWirb7vIn66xcG0VvLCJwAaPluk7ba7qB+CcmmeimQMdmnFoAQ+3nd71nwJAA5Yy -41N6C3YMx7asQdwmPc3M/WN7U9e0tdlwU7RyjPXRwpm760ZZVQ5T/v86QIoOYhR1 -r4Rgub+j60bH2BKBnQJAIDZfnhUFv6eYy6I/yPcQ+sIOjC7X1/6XjjPNMAeOheVn -zUc7lh/1HLm55dzLz2Csrosc5YX3ZV99h58Mm5k+Vw== +MIIJKwIBAAKCAgEApZbM2FwREXdRztX/WqIwr3fK1BA5juT/t3aNuT5BVgYtPrJW +7FqoYTUVAcbK662A/yMMpNYTP5lejpEhwuFQXDi3rhNAI6oamhIKfEaZcEvj1VwS +O2JF+FNWl6t0y+gwd6RQTGJcpd5i2aszpc7BTBYjRxG1RQch4Uue5/fx9eD2XgxS +YgbLq6ODa1KVdm2yoWT+Tp8soaWPNFfeTDQrrQcsb5IT+oCclwsMK49z4hK2uxko +Pg7NsvrpLUNNlQCiFoiGdlswdfEumGolKDkS893vLF2pIGhykCGjKbVl+xVAQ6ch +TlkuOH2q91CDnojZwd6pkkPCiSG0v/crIROrGAu8mhKQkkzQ7IMixNLiBEdk9tum +2fCeqVhz/mGp6W5xlKXEIdoLPxtFRHpOI96PGGyHo5GhAIlrqqzUmuRSDnr4Iiyo +vwlX1pmVl1y4WK25oxhExf2qMAOSHH0eA8RhqIulKhbQCYnfwxbgSIBFyWGJR4bD +pyIpPum99RzUUQ5ez848cvK7LiNASeCHoBAeutG0YpXSBTHL+ql/UVLQ7e1yvdV/ +sDi1rnztLBPwUK6+To7Kn67eErdbi/50q/rXryF6YGbPsBZ5xVMmX9nleLkqSTSp +PPPWniHm6GBcZPWmAUWauBCa7XmJmdREoRZ1afA+efvpTgTm7/S4+GVvs+0CAwEA +AQKCAgEAiL4HW4Rr8+h8/jlqLgZR/hUGwijD32TsZyzXzGnEuq1PH79WWMhk1CFp +v5XSbN1S8V6YSmcebh7RHxpqruwx2HZd+Lqc9Na8MQ9E6WvDuiBxfPgTdkapUXBA +ye8k/F456BMg3HM93xvOtcHTXNFoftSpPT86Wk6Rg+NWzmjKvymPSgsS3TCPcKYP +GMmR88KTCQTFnVeFG9gEck09neBXUQPjhh8zsGIU7gaJfk9wevjJPaiAuv6uj2b0 +uBQkNS/YqpMDtymG017gA61kEdtP82MK57BQwhp+wNeGTiMmnDnoX/XcYz7yFGRy +ktlCV+DbMmYV0ltygpv7D6ulSiNb3aNFb0uk83xjoXKjx7YQW9bI1Uz2eFPoQAPo +mfgNW/Zp9P7z4WZJEbQPTQP/hNHMfRo+1Gx4J5Dm62I8hqByvoVoP2UTOZpKUmG7 +XOQEMqNei+fKuJbMOBoZ2qoEneSMw7RQfxDmD9xv7vDpb6XzWgiIzTCoo2Ikk/Cd +X2YBgYNP5VP7pYZ8FzOI9unvnOx5Zwxx15GYrYXV/pEOsmikkllvi1/wGtyyZUjb +s2BdSpP86tqBT+kB/hS8JxTIEN7HR61TQt5NB1WxPom4yUTatd8MqSzdrmJqC4z5 +DDurZPkvxFS5Kf34e1VG7SJrcxdegO9s/mtTn3eCqtmxR1GkL2ECggEBANYDNlK2 +0A6XVWTX24kMeV5wRsYgVv533KgefeRwdzKzRQ/0xKmPK02RftX8faUo3AF9YTl8 +XclHVovMsTCfDwNdXzvdb3w9r5SURmPWFjZjnHRdHOafwQS4GAzhX95N2KbmO1Lb +dknwiASKI0vrSzuZn0D81IDq2Ulq+NcjOeKS9SXCvF1igZDiB25NwT+5lnd/cXMH +0/ziHv9nIzLq+bfVE9KWTGYG7LzabqVUI6klDG5UqgKv0n3yae3fFUkaEC15zmdU +zcVXYf9uFnq3aPVyach61M3zpFkoZfdOc5FutSAENHEkMXPjaFmENPnYSN7O46dZ +CbjmzCS3HTzuiu8CggEBAMYThEnNRmsSW8pfX2JRRsLKBVB3FbkfBf/f5kfVLyjo +c1dnmZzQbUZzZeLs/HWjKqNxktsq762RmHdF2xv8HPutqOFXom/BlJcfhUXF9dan +bVnRLj+L7vwKxd4YWa91hLwLxgDRkWI4XO+iPht6KpzlwOaTrej99h6iTUaOzt7R +wSJlsBCDj1Jgews6QRHoP05R/Ehw9yBi+Azf5JneB+WIme5Y29WltAlBMJDHMCgc +4K/S40U96/TeWVjnc57RrzQN9yRM9Is4A3WqzP8xuCcA1KsUbpSly4vMDRQQP40Q +ETSy3R94lSGahs50fEt0VZCEwEHvig/5KnO6tWqFnuMCggEBAIeQnVaj6wNzJWqt +uakEt9T0tkBGuBSVhLcSKZkNDNSW7oZ+/ByUTk/ifD+8ozJ9wW9IJtAtUZNwlwgT +b6Jm/zGYcf0P9dDzmkc57aTMNmHZk3+6g9YrGC+PFd0C3qGJGlYOvUFtN2766I5H +mrg6ofttAo4+GbZYDbAODPbqn35ArP1wb7WP8pb+NsrOgj2FqCSmHA1LxiMIca5D +fO6CHhEu7lGVV2vBszCmBTTBKZ25lDhHdTIigem6JxPBHlCiK+FCqVaXR4lcIv2U +lLTDfb8M7KlL9YVIcrDvgDe6AEb9o8pWH4oT7SeFw9IAhzZEpVROJbMaGaiAuov/ +WowAZw0CggEBALFJ2LdB/8xoQzZQxQw4GTDSJ42M+SmX5gPPQMt8udhQrqRF+01L +lPNg6IoDejhE0i42wq5esOZXEfN32BUlRD/UgPspOB/1UW0ublg0RsVZWFvzCgUg +18hKUC5o9yU/941ksFYdPZZ/QlfOjO6FG00Rq+X1usx3O2rR9H655dm0Przt7Xfq +eUbPSnKTMpi3mqocYcXpLpiTXNgRMgiynbjJ2pVmfWWuCgXajoCXeLf+mPFmvbtF +IEQtHCWiDG/T2JCsC1A3fQ57FUWlmhS0SNLIQJHcGNn9x8EZ437YyDkXb38OtTKs ++DZ6nDyAMJxMxSU0XOznXVjMuT2amTR94ucCggEBAMNxXMOvtsy5s0SFk1jZOUAC +bEt9SvYYMzUIlNZZYTu69qU38J+ScPmQY2bTMM5oKu0y8RI8C7RtFUbgH6MqJfSM +pMa0uDNjVP1fEQbI5oatjsEJzyjqBVRgOSJODrgBSx2A4J9nfmXxkuv7U1Wo7CtN +0AG3p4wO5JH/IB/ex+ZevaoTYtBDnSagbJYsvWIfv9NYelL2zVG4lKJ9bBDnF0Xk +wZNb4mJtu3FtiJIXXdYDdrM+ARiLfn4t5HPccgFohFD/Ks3nQPXjydXruHkNPrdb 
+y979XN5woKDgO0sMzktFM0VssRC+bc4GuRMYLaPHI39q8a18q6Q0MBN0xaH+a5c= -----END RSA PRIVATE KEY----- """ class TestLoadCertificates(TestCase): - def setUp(t): t.FilePath_patcher = patch( "{src}.FilePath".format(**PATH), diff --git a/Products/ZenCollector/tests/test_daemon.py b/Products/ZenCollector/tests/test_daemon.py index ac375fab8c..fe7c6de7ee 100644 --- a/Products/ZenCollector/tests/test_daemon.py +++ b/Products/ZenCollector/tests/test_daemon.py @@ -1,4 +1,4 @@ -from mock import Mock, patch, create_autospec +from mock import ANY, Mock, patch, create_autospec from unittest import TestCase from Products.ZenHub.tests.mock_interface import create_interface_mock @@ -6,14 +6,13 @@ from Products.ZenCollector.daemon import ( CollectorDaemon, defer, - Failure, ICollectorPreferences, IConfigurationListener, ITaskSplitter, ) -class TestCollectorDaemon_maintenanceCycle(TestCase): +class TestCollectorDaemon_maintenanceCallback(TestCase): def setUp(t): # Patch out the __init__ method, due to excessive side-effects t.init_patcher = patch.object( @@ -39,19 +38,19 @@ def setUp(t): t.cd.getDevicePingIssues = create_autospec(t.cd.getDevicePingIssues) t.cd._unresponsiveDevices = set() - def test__maintenanceCycle(t): - ret = t.cd._maintenanceCycle() + def test__maintenanceCallback(t): + ret = t.cd._maintenanceCallback() t.cd.log.debug.assert_called_with( "deviceIssues=%r", t.cd.getDevicePingIssues.return_value ) - t.assertEqual(ret.result, t.cd.getDevicePingIssues.return_value) + t.assertIsNone(ret.result) def test_ignores_unresponsive_devices(t): t.cd.log = Mock(name="log") t.cd._prefs.pauseUnreachableDevices = False - ret = t.cd._maintenanceCycle() + ret = t.cd._maintenanceCallback() t.assertEqual(ret.result, None) @@ -60,19 +59,19 @@ def test_no_cycle_option(t): t.cd._prefs.pauseUnreachableDevices = False t.cd.options.cycle = False - ret = t.cd._maintenanceCycle() + ret = t.cd._maintenanceCallback() - t.assertEqual(ret.result, "No maintenance required") + t.assertIsNone(ret.result) def test_handle_getDevicePingIssues_exception(t): t.cd.getDevicePingIssues.side_effect = Exception handler = _Capture() - ret = t.cd._maintenanceCycle() + ret = t.cd._maintenanceCallback() ret.addErrback(handler) - t.assertIsInstance(handler.err, Failure) - t.assertIsInstance(handler.err.value, Exception) + t.assertIsNone(handler.err) + t.cd.log.exception.assert_called_once_with(ANY) def test_handle__pauseUnreachableDevices_exception(t): t.cd._pauseUnreachableDevices = create_autospec( @@ -81,11 +80,11 @@ def test_handle__pauseUnreachableDevices_exception(t): t.cd._pauseUnreachableDevices.side_effect = Exception handler = _Capture() - ret = t.cd._maintenanceCycle() + ret = t.cd._maintenanceCallback() ret.addErrback(handler) - t.assertIsInstance(handler.err, Failure) - t.assertIsInstance(handler.err.value, Exception) + t.assertIsNone(handler.err) + t.cd.log.exception.assert_called_once_with(ANY) def test__pauseUnreachableDevices(t): t.cd._scheduler = Mock( @@ -105,7 +104,6 @@ def test__pauseUnreachableDevices(t): class _Capture(object): - def __init__(self): self.err = None diff --git a/Products/ZenCollector/utils/maintenance.py b/Products/ZenCollector/utils/maintenance.py index 6867a2b3db..d4e29aad00 100644 --- a/Products/ZenCollector/utils/maintenance.py +++ b/Products/ZenCollector/utils/maintenance.py @@ -10,10 +10,12 @@ import logging from twisted.internet import defer, reactor -from twisted.python.failure import Failure +from twisted.internet.task import LoopingCall from zenoss.protocols.protobufs.zep_pb2 import 
DaemonHeartbeat from zope.component import getUtility +from Products.ZenEvents.ZenEventClasses import Heartbeat +from Products.ZenHub.interfaces import IEventService from Products.ZenMessaging.queuemessaging.interfaces import IQueuePublisher log = logging.getLogger("zen.maintenance") @@ -28,8 +30,8 @@ def maintenanceBuildOptions(parser, defaultCycle=60): dest="maintenancecycle", default=defaultCycle, type="int", - help="Cycle, in seconds, for maintenance tasks." - " Default is %s." % defaultCycle, + help="Cycle, in seconds, for maintenance tasks " + "[default %s]" % defaultCycle, ) @@ -52,72 +54,68 @@ def heartbeat(self): daemon=self._daemon, timeout_seconds=self._timeout, ) - log.debug("sending heartbeat %s", heartbeat) publisher = getUtility(IQueuePublisher) publisher.publish( "$Heartbeats", "zenoss.heartbeat.%s" % heartbeat.monitor, heartbeat ) + log.debug("sent heartbeat %s", heartbeat) + + +class ZenHubHeartbeatSender(object): + """ + Default heartbeat sender for CollectorDaemon. + """ + + def __init__(self, monitor, daemon, timeout): + self.__event = { + "eventClass": Heartbeat, + "device": monitor, + "component": daemon, + "timeout": timeout + } + + def heartbeat(self): + getUtility(IEventService).sendHeartbeat(self.__event) class MaintenanceCycle(object): def __init__( self, cycleInterval, heartbeatSender=None, maintenanceCallback=None ): - self._cycleInterval = cycleInterval - self._heartbeatSender = heartbeatSender - self._callback = maintenanceCallback - self._stop = False + self.__interval = cycleInterval + self.__heartbeatSender = heartbeatSender + self.__callback = maintenanceCallback + self.__task = LoopingCall(self._maintenance) def start(self): - reactor.callWhenRunning(self._doMaintenance) + if self.__interval > 0: + interval = self.__interval + self.__task.start(interval, now=True) + else: + # maintenance is run only once if _interval <= 0. + interval = "run-once" + reactor.callWhenRunning(self._maintenance) + log.debug("maintenance started interval=%s", interval) def stop(self): - log.debug("Maintenance stopped") - self._stop = True + self.__task.stop() + log.debug("maintenance stopped") - def _doMaintenance(self): + @defer.inlineCallbacks + def _maintenance(self): """ - Perform daemon maintenance processing on a periodic schedule. Initially - called after the daemon configuration loader task is added, but - afterward will self-schedule each run. + Perform daemon maintenance processing on a periodic schedule. """ - if self._stop: - log.debug("Skipping, maintenance stopped") - return - - log.info("Performing periodic maintenance") - interval = self._cycleInterval - - def _maintenance(): - if self._heartbeatSender is not None: - log.debug("Calling heartbeat sender") - d = defer.maybeDeferred(self._heartbeatSender.heartbeat) - d.addCallback(self._additionalMaintenance) - return d - else: - log.debug("Skipping heartbeat: no sender configured") - return defer.maybeDeferred(self._additionalMaintenance) - - def _reschedule(result): - if isinstance(result, Failure): - # The full error message is actually the entire traceback, so - # just get the last line with the actual message. - log.error( - "Maintenance failed. 
Message from hub: %s", - result.getErrorMessage(), - ) - - if interval > 0: - log.debug("Rescheduling maintenance in %ds", interval) - reactor.callLater(interval, self._doMaintenance) - - d = _maintenance() - d.addBoth(_reschedule) - - return d - - def _additionalMaintenance(self, result=None): - if self._callback: - log.debug("calling additional maintenance") - d = defer.maybeDeferred(self._callback, result) - return d + if self.__heartbeatSender is not None: + try: + yield self.__heartbeatSender.heartbeat() + log.debug("sent heartbeat") + except Exception: + log.exception("failed to send heartbeat") + if self.__callback is not None: + try: + yield self.__callback() + log.debug("executed maintenance callback") + except Exception: + log.exception("failed to execute maintenance callback") + log.debug("performed periodic maintanence") diff --git a/Products/ZenCollector/zencyberark.py b/Products/ZenCollector/zencyberark.py index c64e9cb246..dbcc488624 100644 --- a/Products/ZenCollector/zencyberark.py +++ b/Products/ZenCollector/zencyberark.py @@ -13,6 +13,7 @@ import httplib import json import logging +import sys from twisted.internet.defer import inlineCallbacks, maybeDeferred from twisted.internet.task import react @@ -20,14 +21,20 @@ from .cyberark import get_cyberark -def configure_logging(debug=False): - log = logging.getLogger() - log.setLevel(logging.INFO if not debug else logging.DEBUG) +def configure_logging(verbose=False): + logging._handlers.clear() + formatter = logging.Formatter("%(levelname)s: %(message)s") + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(formatter) + + root = logging.getLogger() + root.setLevel(logging.INFO if not verbose else logging.DEBUG) + root.addHandler(handler) return logging.getLogger("zencyberark") def check(args): - log = configure_logging(args.debug) + log = configure_logging(args.verbose) log.info("Checking config") get_cyberark() return 0 @@ -35,7 +42,7 @@ def check(args): @inlineCallbacks def get(args): - log = configure_logging(args.debug) + log = configure_logging(args.verbose) cyberark = get_cyberark() client = cyberark._manager._client @@ -92,7 +99,13 @@ def parse_args(): description="Model Catalog hacking tool", formatter_class=argparse.RawTextHelpFormatter, ) - parser.add_argument("-d", "--debug", action="store_true", default=False) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + help="display more information", + ) subparsers = parser.add_subparsers(help="sub-command help") check_cmd = subparsers.add_parser( diff --git a/Products/ZenEvents/Availability.py b/Products/ZenEvents/Availability.py index 01c8ff5ada..ffe6151846 100644 --- a/Products/ZenEvents/Availability.py +++ b/Products/ZenEvents/Availability.py @@ -113,9 +113,19 @@ def __float__(self): def __int__(self): return int(self.availability * 100) - def __cmp__(self, other): - return cmp((self.availability, self.device, self.component()), - (other.availability, other.device, other.component())) + def __eq__(self, other): + if not isinstance(other, Availability): + return False + this = (self.availability, self.device, self.component()) + that = (other.availability, other.device, other.component()) + return this == that + + def __lt__(self, other): + if not isinstance(other, Availability): + return NotImplemented + this = (self.availability, self.device, self.component()) + that = (other.availability, other.device, other.component()) + return this < that def getDevice(self, dmd): return dmd.Devices.findDevice(self.device) @@ 
-183,9 +193,26 @@ def tuple(self): def __hash__(self): return hash(self.tuple()) - def __cmp__(self, other): - return cmp(self.tuple(), other.tuple()) - + def __eq__(self, other): + if not isinstance(other, Report): + return False + if self is other: + return True + return self.tuple() == other.tuple() + + def __lt__(self, other): + if not isinstance(other, Report): + return NotImplemented + if self is other: + return False + return self.tuple() < other.tuple() + + def __le__(self, other): + if not isinstance(other, Report): + return NotImplemented + if self is other: + return True + return self.tuple() <= other.tuple() def run(self, dmd): """Run the report, returning an Availability object for each device""" diff --git a/Products/ZenEvents/EventManagerBase.py b/Products/ZenEvents/EventManagerBase.py index 3188646377..0ce83972fb 100644 --- a/Products/ZenEvents/EventManagerBase.py +++ b/Products/ZenEvents/EventManagerBase.py @@ -1,6 +1,6 @@ ############################################################################## # -# Copyright (C) Zenoss, Inc. 2007, all rights reserved. +# Copyright (C) Zenoss, Inc. 2007, 2023 all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. @@ -117,6 +117,95 @@ class EventManagerBase(ZenModelRM): eventAgingHours = 4 eventAgingSeverity = 4 historyMaxAgeDays = 0 + trapFilters = """# Format: [COLLECTOR REGEX] include|exclude v1|v2 +# Include all generic SNMP V1 Traps 0-5 +include v1 0 +include v1 1 +include v1 2 +include v1 3 +include v1 4 +include v1 5 + +# Include all enterprise-specific SNMP V1 traps +include v1 * + +# Include all SNMP V2 traps +include v2 *""" + # Regular expressions that parse syslog tags from different sources + # A tuple can also be specified, in which case the second item in the + # tuple is a boolean which tells whether or not to keep the entry (default) + # or to discard the entry and not create an event. + syslogParsers = [{ + "description": "generic mark", + "expr": "^(?P-- (?PMARK) --)", + "keep": True +},{ + "description": "Cisco UCS: 2010 Oct 19 15:47:45 CDT: snmpd: SNMP Operation (GET) failed. 
Reason:2 reqId (257790979) errno (42) error index (1)", + "expr": "^: \d{4} \w{3}\s+\d{1,2}\s+\d{1,2}:\d\d:\d\d \w{3}: %(?P[^:]+): (?P.*)", + "keep": True +},{ + "description": "ntsyslog windows msg", + "expr": "^(?P.+)\[(?P\D+)\] (?P\d+) (?P.*)", + "keep": True +},{ + "description": "cisco msg with card indicator", + "expr": "%CARD-\S+:(SLOT\d+) %(?P\S+): (?P.*)", + "keep": True +},{ + "description": "cisco standard msg", + "expr": "%(?P(?P\S+)-(?P\d)-\S+): *(?P.*)", + "keep": True +},{ + "description": "Cisco ACS", + "expr": "^(?P\S+)\s+(?P(?P(CisACS_\d\d|CSCOacs)_\S+)\s+(?P\S+)\s.*)", + "keep": True +},{ + "description": "netscreen device msg", + "expr": "device_id=\S+\s+\[\S+\](?P\S+\d+):\s+(?P.*)\s+\((?P\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)\)", + "keep": True +},{ + "description": "NetApp: [deviceName: 10/100/1000/e1a:warning]: Client 10.0.0.101 (xid 4251521131) is trying to access an unexported mount (fileid 64, snapid 0, generation 6111516 and flags 0x0 on volume 0xc97d89a [No volume name available])", + "expr": "^\[[^:]+: (?P[^:]+)[^\]]+\]: (?P.*)", + "keep": True +},{ + "description": "unix syslog with pid", + "expr": "(?P\S+)\[(?P\d+)\]:\s*(?P.*)", + "keep": True +},{ + "description": "unix syslog without pid", + "expr": "(?P\S+): (?P.*)", + "keep": True +},{ + "description": "adtran devices", + "expr": "^(?P[^\[]+)\[(?PADTRAN)\]:(?P[^\|]+\|\d+\|\d+)\|(?P.*)", + "keep": True +},{ + "description": "fortigate devices", + "expr": "^date=.+ (?Pdevname=.+ log_id=(?P\d+) type=(?P\S+).+)", + "keep": True +},{ + "description": "proprietary message passing system", + "expr": "^(?P\S+)(\.|\s)[A-Z]{3} \d \S+ \d\d:\d\d:\d\d-\d\d:\d\d:\d\d \d{5} \d{2} \d{5} \S+ \d{4} \d{3,5} (- )*(?P.*) \d{4} \d{4}", + "keep": True +},{ + "description": "Cisco port state logging info", + "expr": "^Process (?P\d+), Nbr (?P\d+\.\d+\.\d+\.\d+) on (?P\w+/\d+) from (?P\w+) to (?P\w+), (?P.+)", + "keep": True +},{ + "description": "Cisco VPN Concentrator: 54884 05/25/2009 13:41:14.060 SEV=3 HTTP/42 RPT=4623 Error on socket accept.", + "expr": "^\d+ \d+\/\d+\/\d+ \d+:\d+:\d+\.\d+ SEV=\d+ (?P\S+) RPT=\d+ (?P.*)", + "keep": True +},{ + "description": "Dell Storage Array: 2626:48:VolExec:27-Aug-2009 13:15:58.072049:VE_VolSetWorker.hh:75:WARNING:43.3.2:Volume volumeName has reached 96 percent of its reported size and is currently using 492690MB.", + "expr": "^\d+:\d+:(?P[^:]+):\d+-\w{3}-\d{4} \d{2}:\d{2}:\d{2}\.\d+:[^:]+:\d+:\w+:(?P[^:]+):(?P.*)", + "keep": True +},{ + "description": "1-Oct-2009 23:00:00.383809:snapshotDelete.cc:290:INFO:8.2.5:Successfully deleted snapshot \"UNVSQLCLUSTERTEMPDB-2009-09-30-23:00:14.11563\"", + "expr": "^\d+-\w{3}-\d{4} \d{2}:\d{2}:\d{2}\.\d+:[^:]+:\d+:\w+:(?P[^:]+):(?P.*)", + "keep": True +}] + syslogSummaryToMessage = False + syslogMsgEvtFieldFilterRules = {} _properties = ( {'id':'backend', 'type':'string','mode':'r', }, @@ -158,6 +247,10 @@ class EventManagerBase(ZenModelRM): {'id':'eventAgingHours', 'type':'int', 'mode':'w'}, {'id':'eventAgingSeverity', 'type':'int', 'mode':'w'}, {'id':'historyMaxAgeDays', 'type':'int', 'mode':'w'}, + {'id':'trapFilters', 'type':'string', 'mode':'w'}, + {'id':'syslogParsers', 'type':'list', 'mode':'w'}, + {'id':'syslogSummaryToMessage', 'type':'boolean', 'mode':'w'}, + {'id':'syslogMsgEvtFieldFilterRules', 'type':'dict', 'mode':'w'}, ) _relations = ( @@ -233,9 +326,6 @@ def getAvailability(self, state, **kw): for name in allowedFilters: if hasattr(state, name): kw.setdefault(name, getattr(state, name)) - # ZEN-30539 - if not getattr(state, 'generate', 
''): - return None if getattr(state, 'startDate', None) is not None: kw.setdefault('startDate', Time.ParseUSDate(state.startDate)) if getattr(state, 'endDate', None) is not None: diff --git a/Products/ZenEvents/EventServer.py b/Products/ZenEvents/EventServer.py index 296c9af6a0..3679c4596d 100644 --- a/Products/ZenEvents/EventServer.py +++ b/Products/ZenEvents/EventServer.py @@ -7,17 +7,13 @@ # ############################################################################## - -__doc__ = """EventServer +"""EventServer Formerly contained base class 'EventServer' for ZenSyslog, ZenTrap and others. Stats is still used by ZenSysLog and ZenTrap - -$Id$ """ -__version__ = "$Revision$"[11:-2] - +from __future__ import absolute_import class Stats: @@ -31,4 +27,4 @@ def add(self, moreTime): self.maxTime = max(self.maxTime, moreTime) def report(self): - return self.totalTime, self.totalEvents, self.maxTime \ No newline at end of file + return self.totalTime, self.totalEvents, self.maxTime diff --git a/Products/ZenEvents/SyslogProcessing.py b/Products/ZenEvents/SyslogProcessing.py deleted file mode 100644 index 5cd5a2f504..0000000000 --- a/Products/ZenEvents/SyslogProcessing.py +++ /dev/null @@ -1,307 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -__doc__ = """SyslogProcessing -Class for turning syslog events into Zenoss Events -""" - -import re -import logging -slog = logging.getLogger("zen.Syslog") -import socket - -from Products.ZenEvents.syslog_h import * -from Products.ZenUtils.IpUtil import isip - - -# Regular expressions that parse syslog tags from different sources -# A tuple can also be specified, in which case the second item in the -# tuple is a boolean which tells whether or not to keep the entry (default) -# or to discard the entry and not create an event. -parsers = ( -# generic mark -r"^(?P-- (?PMARK) --)", - -# Cisco UCS -# : 2010 Oct 19 15:47:45 CDT: snmpd: SNMP Operation (GET) failed. 
Reason:2 reqId (257790979) errno (42) error index (1) -r'^: \d{4} \w{3}\s+\d{1,2}\s+\d{1,2}:\d\d:\d\d \w{3}: %(?P[^:]+): (?P.*)', - -# ntsyslog windows msg -r"^(?P.+)\[(?P\D+)\] (?P\d+) (?P.*)", - -# cisco msg with card indicator -r"%CARD-\S+:(SLOT\d+) %(?P\S+): (?P.*)", - -# cisco standard msg -r"%(?P(?P\S+)-(?P\d)-\S+): *(?P.*)", - -# Cisco ACS -r"^(?P\S+)\s+(?P(?P(CisACS_\d\d|CSCOacs)_\S+)\s+(?P\S+)\s.*)", - -# netscreen device msg -r"device_id=\S+\s+\[\S+\](?P\S+\d+):\s+(?P.*)\s+\((?P\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)\)", - -# NetApp -# [deviceName: 10/100/1000/e1a:warning]: Client 10.0.0.101 (xid 4251521131) is trying to access an unexported mount (fileid 64, snapid 0, generation 6111516 and flags 0x0 on volume 0xc97d89a [No volume name available]) -r"^\[[^:]+: (?P[^:]+)[^\]]+\]: (?P.*)", - -# unix syslog with pid -r"(?P\S+)\[(?P\d+)\]:\s*(?P.*)", - -# unix syslog without pid -r"(?P\S+): (?P.*)", - -# adtran devices -r"^(?P[^\[]+)\[(?PADTRAN)\]:(?P[^\|]+\|\d+\|\d+)\|(?P.*)", - -r"^date=.+ (?Pdevname=.+ log_id=(?P\d+) type=(?P\S+).+)", - -# proprietary message passing system -r"^(?P\S+)(\.|\s)[A-Z]{3} \d \S+ \d\d:\d\d:\d\d-\d\d:\d\d:\d\d \d{5} \d{2} \d{5} \S+ \d{4} \d{3,5} (- )*(?P.*) \d{4} \d{4}", - -# Cisco port state logging info -r"^Process (?P\d+), Nbr (?P\d+\.\d+\.\d+\.\d+) on (?P\w+/\d+) from (?P\w+) to (?P\w+), (?P.+)", - -# Cisco VPN Concentrator -# 54884 05/25/2009 13:41:14.060 SEV=3 HTTP/42 RPT=4623 Error on socket accept. -r"^\d+ \d+\/\d+\/\d+ \d+:\d+:\d+\.\d+ SEV=\d+ (?P\S+) RPT=\d+ (?P.*)", - -# Dell Storage Array -# 2626:48:VolExec:27-Aug-2009 13:15:58.072049:VE_VolSetWorker.hh:75:WARNING:43.3.2:Volume volumeName has reached 96 percent of its reported size and is currently using 492690MB. -r'^\d+:\d+:(?P[^:]+):\d+-\w{3}-\d{4} \d{2}:\d{2}:\d{2}\.\d+:[^:]+:\d+:\w+:(?P[^:]+):(?P.*)', - -# 1-Oct-2009 23:00:00.383809:snapshotDelete.cc:290:INFO:8.2.5:Successfully deleted snapshot 'UNVSQLCLUSTERTEMPDB-2009-09-30-23:00:14.11563'. -r'^\d+-\w{3}-\d{4} \d{2}:\d{2}:\d{2}\.\d+:[^:]+:\d+:\w+:(?P[^:]+):(?P.*)', -) - -# compile regex parsers on load -compiledParsers = [] -for regex in parsers: - keepEntry = True - if isinstance(regex, tuple): - regex, keepEntry = regex - try: - compiled = re.compile(regex, re.DOTALL) - compiledParsers.append((compiled, keepEntry)) - except Exception: - pass - - -class SyslogProcessor(object): - """ - Class to process syslog messages and convert them into events viewable - in the Zenoss event console. 
- """ - - def __init__(self,sendEvent,minpriority,parsehost,monitor,defaultPriority): - """ - Initializer - - @param sendEvent: message from a remote host - @type sendEvent: string - @param minpriority: ignore anything under this priority - @type minpriority: integer - @param parsehost: hostname where this parser is running - @type parsehost: string - @param monitor: name of the distributed collector monitor - @type monitor: string - @param defaultPriority: priority to use if it can't be understood from the received packet - @type defaultPriority: integer - """ - self.minpriority = minpriority - self.parsehost = parsehost - self.sendEvent = sendEvent - self.monitor = monitor - self.defaultPriority = defaultPriority - - - def process(self, msg, ipaddr, host, rtime): - """ - Process an event from syslog and convert to a Zenoss event - - @param msg: message from a remote host - @type msg: string - @param ipaddr: IP address of the remote host - @type ipaddr: string - @param host: remote host's name - @type host: string - @param rtime: time as reported by the remote host - @type rtime: string - """ - evt = dict(device=host, - ipAddress=ipaddr, - firstTime=rtime, - lastTime=rtime, - eventGroup='syslog') - slog.debug("host=%s, ip=%s", host, ipaddr) - slog.debug(msg) - - evt, msg = self.parsePRI(evt, msg) - if evt['priority'] > self.minpriority: return - - evt, msg = self.parseHEADER(evt, msg) - evt = self.parseTag(evt, msg) - if evt: - # Cisco standard msg includes the severity in the tag - if 'overwriteSeverity' in evt.keys(): - old_severity = evt['severity'] - new_severity = self.defaultSeverityMap(int(evt['overwriteSeverity'])) - evt['severity'] = new_severity - slog.debug('Severity overwritten in message tag. Previous:%s Current:%s', old_severity, new_severity) - #rest of msg now in summary of event - evt = self.buildEventClassKey(evt) - evt['monitor'] = self.monitor - evt['message'] = unicode(msg) - if 'summary' in evt: - evt['summary'] = unicode(evt['summary'] ) - self.sendEvent(evt) - - - def parsePRI(self, evt, msg): - """ - Parse RFC-3164 PRI part of syslog message to get facility and priority. - - @param evt: dictionary of event properties - @type evt: dictionary - @param msg: message from host - @type msg: string - @return: tuple of dictionary of event properties and the message - @type: (dictionary, string) - """ - pri = self.defaultPriority - fac = None - if msg[:1] == '<': - pos = msg.find('>') - fac, pri = LOG_UNPACK(int(msg[1:pos])) - msg = msg[pos+1:] - elif msg and msg[0] < ' ': - fac, pri = LOG_KERN, ord(msg[0]) - msg = msg[1:] - evt['facility'] = fac - evt['priority'] = pri - evt['severity'] = self.defaultSeverityMap(pri) - slog.debug("fac=%s pri=%s", fac, pri) - slog.debug("facility=%s severity=%s", evt['facility'], evt['severity']) - return evt, msg - - - def defaultSeverityMap(self, pri): - """ - Default mapping from syslog priority to severity. - - @param pri: syslog priority from host - @type pri: integer - @return: numeric severity - @type: integer - """ - sev = 1 - if pri < 3: sev = 5 - elif pri == 3: sev = 4 - elif pri == 4: sev = 3 - elif pri == 5 or pri == 6: sev = 2 - return sev - - - timeParse = \ - re.compile("^(\S{3} [\d ]{2} [\d ]{2}:[\d ]{2}:[\d ]{2}(?:\.\d{1,3})?) (.*)", re.DOTALL).search - notHostSearch = re.compile("[\[:]").search - def parseHEADER(self, evt, msg): - """ - Parse RFC-3164 HEADER part of syslog message. TIMESTAMP format is: - MMM HH:MM:SS and host is next token without the characters '[' or ':'. 
- - @param evt: dictionary of event properties - @type evt: dictionary - @param msg: message from host - @type msg: string - @return: tuple of dictionary of event properties and the message - @type: (dictionary, string) - """ - slog.debug(msg) - m = re.sub("Kiwi_Syslog_Daemon \d+: \d+: " - "\S{3} [\d ]{2} [\d ]{2}:[\d ]{2}:[^:]+: ", "", msg) - m = self.timeParse(msg) - if m: - slog.debug("parseHEADER timestamp=%s", m.group(1)) - evt['originalTime'] = m.group(1) - msg = m.group(2).strip() - msglist = msg.split() - if self.parsehost and not self.notHostSearch(msglist[0]): - device = msglist[0] - if device.find('@') >= 0: - device = device.split('@', 1)[1] - slog.debug("parseHEADER hostname=%s", evt['device']) - msg = " ".join(msglist[1:]) - evt['device'] = device - if isip(device): - evt['ipAddress'] = device - else: - if 'ipAddress' in evt: - del(evt['ipAddress']) - return evt, msg - - - def parseTag(self, evt, msg): - """ - Parse the RFC-3164 tag of the syslog message using the regex defined - at the top of this module. - - @param evt: dictionary of event properties - @type evt: dictionary - @param msg: message from host - @type msg: string - @return: dictionary of event properties - @type: dictionary - """ - slog.debug(msg) - for parser, keepEntry in compiledParsers: - slog.debug("tag regex: %s", parser.pattern) - m = parser.search(msg) - if not m: - continue - elif not keepEntry: - slog.debug("Dropping syslog message due to parser rule.") - return None - slog.debug("tag match: %s", m.groupdict()) - evt.update(m.groupdict()) - break - else: - slog.debug("No matching parser: '%s'", msg) - evt['summary'] = msg - return evt - - - def buildEventClassKey(self, evt): - """ - Build the key used to find an events dictionary record. If eventClass - is defined it is used. For NT events "Source_Evid" is used. For other - syslog events we use the summary of the event to perform a full text - or'ed search. - - @param evt: dictionary of event properties - @type evt: dictionary - @return: dictionary of event properties - @type: dictionary - """ - if 'eventClassKey' in evt or 'eventClass' in evt: - return evt - elif 'ntevid' in evt: - evt['eventClassKey'] = "%s_%s" % (evt['component'],evt['ntevid']) - elif 'component' in evt: - evt['eventClassKey'] = evt['component'] - if 'eventClassKey' in evt: - slog.debug("eventClassKey=%s", evt['eventClassKey']) - try: - evt['eventClassKey'] = evt['eventClassKey'].decode('latin-1') - except Exception: - evt['eventClassKey'] = evt['eventClassKey'].decode('utf-8') - else: - slog.debug("No eventClassKey assigned") - return evt diff --git a/Products/ZenEvents/TrapFilter.py b/Products/ZenEvents/TrapFilter.py deleted file mode 100644 index a93f68cae1..0000000000 --- a/Products/ZenEvents/TrapFilter.py +++ /dev/null @@ -1,476 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2015, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -__doc__ = """zentrap - -Filters SNMP traps. 
-""" - -import sys -import logging -import os.path - -import zope.interface -import zope.component - -from zope.interface import implements - -from Products.ZenCollector.interfaces import ICollector, IEventService -from Products.ZenHub.interfaces import ICollectorEventTransformer, \ - TRANSFORM_CONTINUE, \ - TRANSFORM_DROP -from Products.ZenUtils.Utils import unused, zenPath - -log = logging.getLogger("zen.zentrap") - -class TrapFilterError(Exception): - def __init__(self, message): - self.message = message - def __str__(self): - return self.message - -def countOidLevels(oid): - """ - @return: The number of levels in an OID - @rtype: int - """ - return oid.count(".") + 1 if oid else 0 - -def getNextHigherGlobbedOid(oid): - """ - Gets the next highest globbed OID based on OID hierarchy. - For instance, given an oid of or "1.2.3.4" or 1.2.3.4.*", return "1.2.3.*". - - @return: The next highest globbed OID up to just "*" - @rtype: string - """ - dotIndex = oid.rfind(".") - if dotIndex != -1 and oid[dotIndex:] == ".*": - dotIndex = oid.rfind('.', 0, dotIndex) - - if dotIndex < 1 or dotIndex == len(oid)-1: - nextGlobbedOID = "*" - else: - nextGlobbedOID = ''.join([oid[0:dotIndex], ".*"]) - return nextGlobbedOID - -class BaseFilterDefinition(object): - def __init__(self, lineNumber=None, action=None): - self.lineNumber = lineNumber - self.action = action - -class GenericTrapFilterDefinition(BaseFilterDefinition): - def __init__(self, lineNumber=None, action=None, genericTrap=None): - BaseFilterDefinition.__init__(self, lineNumber, action) - self.genericTrap = genericTrap - - def __eq__(self, other): - if isinstance(other, GenericTrapFilterDefinition): - return self.genericTrap == other.genericTrap - else: - return False - - def __ne__(self, other): - return not self.__eq__(other) - - def __hash__(self): - return hash(self.genericTrap) - -class OIDBasedFilterDefinition(BaseFilterDefinition): - def __init__(self, lineNumber=None, action=None, oid=None): - BaseFilterDefinition.__init__(self, lineNumber, action) - self.oid = oid - - def levels(self): - return countOidLevels(self.oid) - - def __eq__(self, other): - if isinstance(other, OIDBasedFilterDefinition): - return self.oid == other.oid - else: - return False - - def __ne__(self, other): - return not self.__eq__(other) - - def __hash__(self): - return hash(self.oid) - -class V1FilterDefinition(OIDBasedFilterDefinition): - def __init__(self, lineNumber=None, action=None, oid=None): - OIDBasedFilterDefinition.__init__(self, lineNumber, action, oid) - self.specificTrap = None - -class V2FilterDefinition(OIDBasedFilterDefinition): - def __init__(self, lineNumber=None, action=None, oid=None): - OIDBasedFilterDefinition.__init__(self, lineNumber, action, oid) - -class TrapFilter(object): - implements(ICollectorEventTransformer) - """ - Interface used to perform filtering of events at the collector. This could be - used to drop events, transform event content, etc. - - These transformers are run sequentially before a fingerprint is generated for - the event, so they can set fields which are used by an ICollectorEventFingerprintGenerator. - - The priority of the event transformer (the transformers are executed in - ascending order using the weight of each filter). 
- """ - weight = 1 - def __init__(self): - self._daemon = None - self._eventService = None - - self._genericTraps = frozenset([0, 1, 2, 3, 4, 5]) - - # Map of SNMP V1 Generic Trap filters where key is the generic trap number and - # value is a GenericTrapFilterDefinition - self._v1Traps = dict() - - # Map of SNMP V1 enterprise-specific traps where key is the count of levels in an OID, and - # value is a map of unique V1FilterDefinition objects for that number of OID levels. - # The map of V1FilterDefinition objects is keyed by "OID[-specificTrap]" - self._v1Filters = dict() - - # Map of SNMP V2 enterprise-specific traps where key is the count of levels in an OID, and - # value is a map of unique V2FilterDefinition objects for that number of OID levels. - # The map of V2FilterDefinition objects is keyed by OID - self._v2Filters = dict() - self._filtersDefined = False - self._initialized = False - - def _parseFilterDefinition(self, line, lineNumber): - """ - Parse an SNMP filter definition of the format: - include|exclude v1|v2 - - @param line: The filter definition to parse - @type line: string - @param lineNumber: The line number of the filter defintion within the file - @type line: int - @return: Returns None on success, or an error message on failure - @rtype: string - """ - tokens = line.split() - if len(tokens) < 3: - return "Incomplete filter definition" - - action = tokens[0].lower() - snmpVersion = tokens[1].lower() - if action != "include" and action != "exclude": - return "Invalid action '%s'; the only valid actions are 'include' or 'exclude'" % tokens[0] - elif snmpVersion != "v1" and snmpVersion != "v2": - return "Invalid SNMP version '%s'; the only valid versions are 'v1' or 'v2'" % tokens[1] - - if snmpVersion == "v1": - return self._parseV1FilterDefinition(lineNumber, action, tokens[2:]) - - return self._parseV2FilterDefinition(lineNumber, action, tokens[2:]) - - def _parseV1FilterDefinition(self, lineNumber, action, remainingTokens): - """ - Parse an SNMP V1 filter definition. - Valid definitions have one of the following formats: - v1 include|exclude TRAP_TYPE - v1 include|exclude GLOBBED_OID - v1 include|exclude OID *|SPECIFIC_TRAP - where - TRAP_TYPE is a generic trap type in the rage [0-5] - GLOBBED_OID is an OID ending with ".*" - OID is an valid OID - SPECIFIC_TRAP is any specific trap type (any non-negative integer) - Note that the last two cases are used for enterprise-specific traps (i.e. - where the generic trap type is 6). 
- - @param lineNumber: The line number of the filter defintion within the file - @type line: int - @param action: The action for this line (include or exclude) - @type line: string - @param remainingTokens: The remaining (unparsed) tokens from the filter definition - @type line: string array - @return: Returns None on success, or an error message on failure - @rtype: string - """ - if len(remainingTokens) > 2: - return "Too many fields found; at most 4 fields allowed for V1 filters" - - oidOrGenericTrap = "" - if len(remainingTokens) > 0: - oidOrGenericTrap = remainingTokens[0].strip(".") - if len(oidOrGenericTrap) == 1 and oidOrGenericTrap != "*": - if not oidOrGenericTrap.isdigit() or oidOrGenericTrap not in "012345": - return "Invalid generic trap '%s'; must be one of 0-5" % (oidOrGenericTrap) - - genericTrapType = oidOrGenericTrap - genericTrapDefinition = GenericTrapFilterDefinition(lineNumber, action, genericTrapType) - if genericTrapType in self._v1Traps: - previousDefinition = self._v1Traps[genericTrapType] - return "Generic trap '%s' conflicts with previous definition at line %d" % (genericTrapType, previousDefinition.lineNumber) - - self._v1Traps[genericTrapType] = genericTrapDefinition - return None - - result = self._validateOID(oidOrGenericTrap) - if result: - return "'%s' is not a valid OID: %s" % (oidOrGenericTrap, result) - - oid = oidOrGenericTrap - filterDef = V1FilterDefinition(lineNumber, action, oid) - if oid.endswith("*"): - if len(remainingTokens) == 2: - return "Specific trap not allowed with globbed OID" - else: - if len(remainingTokens) == 2: - filterDef.specificTrap = remainingTokens[1] - if filterDef.specificTrap != "*" and not filterDef.specificTrap.isdigit(): - return "Specific trap '%s' invalid; must be non-negative integer" % filterDef.specificTrap - else: - return "Missing specific trap number or '*'" - - key = oid - if filterDef.specificTrap != None: - key = ''.join([oid, "-", filterDef.specificTrap]) - - filtersByLevel = self._v1Filters.get(filterDef.levels(), None) - if filtersByLevel == None: - filtersByLevel = {key: filterDef} - self._v1Filters[filterDef.levels()] = filtersByLevel - elif key not in filtersByLevel: - filtersByLevel[key] = filterDef - else: - previousDefinition = filtersByLevel[key] - return "OID '%s' conflicts with previous definition at line %d" % (oid, previousDefinition.lineNumber) - return None - - def _parseV2FilterDefinition(self, lineNumber, action, remainingTokens): - """ - Parse an SNMP V2 filter definition - Valid definitions have one of the following formats: - v2 include|exclude OID - v2 include|exclude GLOBBED_OID - where - OID is an valid OID - GLOBBED_OID is an OID ending with ".*" - - @param lineNumber: The line number of the filter defintion within the file - @type line: int - @param action: The action for this line (include or exclude) - @type line: string - @param remainingTokens: The remaining (unparsed) tokens from the filter definition - @type line: string array - @return: Returns None on success, or an error message on failure - @rtype: string - """ - if len(remainingTokens) > 1: - return "Too many fields found; at most 3 fields allowed for V2 filters" - - oid = "" - if len(remainingTokens) > 0: - oid = remainingTokens[0].strip(".") - result = self._validateOID(oid) - if result: - return "'%s' is not a valid OID: %s" % (oid, result) - - filterDef = V2FilterDefinition(lineNumber, action, oid) - - filtersByLevel = self._v2Filters.get(filterDef.levels(), None) - if filtersByLevel == None: - filtersByLevel = {oid: 
filterDef} - self._v2Filters[filterDef.levels()] = filtersByLevel - elif oid not in filtersByLevel: - filtersByLevel[oid] = filterDef - else: - previousDefinition = filtersByLevel[oid] - return "OID '%s' conflicts with previous definition at line %d" % (oid, previousDefinition.lineNumber) - return None - - def _validateOID(self, oid): - """ - Simplistic SNMP OID validation. Not trying to enforce some RFC spec - - just weed out some of the more obvious mistakes - """ - if oid == "*": - return None - - if not oid: - return "Empty OID is invalid" - - validChars = set('0123456789.*') - if not all((char in validChars) for char in oid): - return "Invalid character found; only digits, '.' and '*' allowed" - - globCount = oid.count("*") - if globCount > 1 or oid.startswith(".*") or (globCount == 1 and not oid.endswith(".*")): - return "When using '*', only a single '*' at the end of OID is allowed" - - if ".." in oid: - return "Consecutive '.'s not allowed" - - if "." not in oid: - return "At least one '.' required" - return None - - def _readFilters(self): - fileName = self._daemon.options.trapFilterFile - if fileName: - path = zenPath('etc', fileName) - if os.path.exists(path): - with open(path) as filterDefinitionFile: - lineNumber = 0 - for line in filterDefinitionFile: - lineNumber += 1 - if line.startswith('#'): - continue - - # skip blank lines - line = line.strip() - if not line: - continue; - - errorMessage = self._parseFilterDefinition(line, lineNumber) - if errorMessage: - errorMessage = "Failed to parse filter definition file %s at line %d: %s" % (format(path), lineNumber, errorMessage) - raise TrapFilterError(errorMessage) - - self._filtersDefined = 0 != (len(self._v1Traps) + len(self._v1Filters) + len(self._v2Filters)) - if self._filtersDefined: - log.info("Finished reading filter definition file %s", format(path)) - else: - log.warn("No zentrap filters found in %s", format(path)) - else: - errorMessage = "Could find filter definition file %s" % format(path) - raise TrapFilterError(errorMessage) - - def initialize(self): - self._daemon = zope.component.getUtility(ICollector) - self._eventService = zope.component.queryUtility(IEventService) - self._readFilters() - self._initialized = True - - def transform(self, event): - """ - Performs any transforms of the specified event at the collector. - - @param event: The event to transform. - @type event: dict - @return: Returns TRANSFORM_CONTINUE if this event should be forwarded on - to the next transformer in the sequence, TRANSFORM_STOP if no - further transformers should be performed on this event, and - TRANSFORM_DROP if the event should be dropped. - @rtype: int - """ - result = TRANSFORM_CONTINUE - snmpVersion = event.get('snmpVersion', None) - if snmpVersion and self._filtersDefined: - log.debug("Filtering V%s event %s", snmpVersion, event) - if self._dropEvent(event): - log.debug("Dropping event %s", event) - result = TRANSFORM_DROP - else: - log.debug("Skipping filter for event=%s, filtersDefined=%s", - event, self._filtersDefined) - return result - - def _dropEvent(self, event): - """ - Determine if an event should be dropped. Assumes there are some filters defined, so the - default if no matching filter is found should be True; i.e. the event did not match any - existing filter that would include it, so therefore we should drop it. - - @param event: The event to drop or keep. - @return: Returns True if the event should be dropped; False if the event be kept. 
- @rtype: boolean - """ - result = True - snmpVersion = event.get('snmpVersion', None) - - if snmpVersion == "1": - result = self._dropV1Event(event) - elif snmpVersion == "2": - result = self._dropV2Event(event) - - return result - - def _dropV1Event(self, event): - genericTrap = event.get("snmpV1GenericTrapType", None) - if genericTrap != None and genericTrap in self._genericTraps: - filterDefinition = self._v1Traps.get(genericTrap, None) - if filterDefinition == None: - return True - return filterDefinition.action == "exclude" - - if genericTrap != 6: - log.error("Generic trap '%s' is invalid for V1 event: %s", genericTrap, event) - return True - - enterpriseOID = event.get("snmpV1Enterprise", None) - if enterpriseOID == None: - log.error("No OID found for enterprise-specific trap for V1 event: %s", event) - return True - - specificTrap = event.get("snmpV1SpecificTrap", None) - if specificTrap != None: - key = ''.join([enterpriseOID, "-", str(specificTrap)]) - filterDefinition = self._findFilterByLevel(key, self._v1Filters) - if filterDefinition != None: - log.debug("_dropV1Event: matched definition %s", filterDefinition) - return filterDefinition.action == "exclude" - - key = ''.join([enterpriseOID, "-", "*"]) - filterDefinition = self._findFilterByLevel(key, self._v1Filters) - if filterDefinition != None: - log.debug("_dropV1Event: matched definition %s", filterDefinition) - return filterDefinition.action == "exclude" - - filterDefinition = self.findClosestGlobbedFilter(enterpriseOID, self._v1Filters) - if filterDefinition == None: - log.debug("_dropV1Event: no matching definitions found") - return True - - log.debug("_dropV1Event: matched definition %s", filterDefinition) - return filterDefinition.action == "exclude" - - def _dropV2Event(self, event): - oid = event["oid"] - - # First, try an exact match on the OID - filterDefinition = self._findFilterByLevel(oid, self._v2Filters) - if filterDefinition != None: - log.debug("_dropV2Event: matched definition %s", filterDefinition) - return filterDefinition.action == "exclude" - - # Convert the OID to its globbed equivalent and try that - filterDefinition = self.findClosestGlobbedFilter(oid, self._v2Filters) - if filterDefinition == None: - log.debug("_dropV2Event: no matching definitions found") - return True - - log.debug("_dropV2Event: matched definition %s", filterDefinition) - return filterDefinition.action == "exclude" - - def findClosestGlobbedFilter(self, oid, filtersByLevel): - filterDefinition = None - globbedValue = oid - while globbedValue != "*": - globbedValue = getNextHigherGlobbedOid(globbedValue) - filterDefinition = self._findFilterByLevel(globbedValue, filtersByLevel) - if filterDefinition: - break - return filterDefinition - - def _findFilterByLevel(self, oid, filtersByLevel): - filterDefinition = None - oidLevels = countOidLevels(oid) - filtersByOid = filtersByLevel.get(oidLevels, None) - if filtersByOid != None and len(filtersByOid) > 0: - filterDefinition = filtersByOid.get(oid, None) - return filterDefinition - diff --git a/Products/ZenEvents/configure.zcml b/Products/ZenEvents/configure.zcml index 783dc2f530..d92682e071 100644 --- a/Products/ZenEvents/configure.zcml +++ b/Products/ZenEvents/configure.zcml @@ -1,27 +1,27 @@ - + + - + - + - - - - - + + + + + - + - - + + + diff --git a/Products/ZenEvents/meta.zcml b/Products/ZenEvents/meta.zcml index bfbccade78..e981b4a291 100644 --- a/Products/ZenEvents/meta.zcml +++ b/Products/ZenEvents/meta.zcml @@ -1,25 +1,25 @@ + + + - - + - + - + - - - + diff --git 
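The V2 drop logic above tries an exact OID match first and then walks the OID up through its globbed ancestors (findClosestGlobbedFilter / getNextHigherGlobbedOid), dropping the trap if nothing matches. A minimal self-contained sketch of that fallback, using a flat dict of OID-to-action rules instead of the level-keyed maps the removed TrapFilter builds; the helper names here are made up:

def next_higher_globbed_oid(oid):
    # "1.2.3.4" -> "1.2.3.*", "1.2.*" -> "1.*", "1.*" or "1" -> "*"
    parts = oid.rstrip(".*").split(".")
    if len(parts) <= 1:
        return "*"
    return ".".join(parts[:-1]) + ".*"

def should_drop_v2(oid, rules):
    # rules: {"1.3.6.1.4.1.*": "include", ...}; with filters defined, an
    # unmatched OID is dropped, matching the _dropEvent docstring above.
    key = oid
    while True:
        action = rules.get(key)
        if action is not None:
            return action == "exclude"
        if key == "*":
            return True
        key = next_higher_globbed_oid(key)

rules = {"1.3.6.1.4.1.*": "include", "1.3.6.1.4.1.9.*": "exclude"}
assert should_drop_v2("1.3.6.1.4.1.9.9.41", rules)       # excluded subtree
assert not should_drop_v2("1.3.6.1.4.1.2021.10", rules)  # included subtree
assert should_drop_v2("1.2.3", rules)                    # no match -> drop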
a/Products/ZenEvents/metaconfigure.py b/Products/ZenEvents/metaconfigure.py index ded97b50eb..b7b9c7ae9c 100644 --- a/Products/ZenEvents/metaconfigure.py +++ b/Products/ZenEvents/metaconfigure.py @@ -1,28 +1,38 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - from zope.component.zcml import utility -from .interfaces import IPreEventPlugin, IPostEventPlugin, IEventIdentifierPlugin -def _eventPlugin( _context, plugin, pluginInterface, name=None): - if name is None: - name = '.'.join((plugin.__module__, plugin.__name__)) - utility(_context, name=name, factory=plugin, provides=pluginInterface) +from .interfaces import ( + IEventIdentifierPlugin, + IPostEventPlugin, + IPreEventPlugin, +) + def preEventPlugin(_context, plugin, name=None): - _eventPlugin( _context, plugin, IPreEventPlugin, name ) + _eventPlugin(_context, plugin, IPreEventPlugin, name) + def postEventPlugin(_context, plugin, name=None): - _eventPlugin( _context, plugin, IPostEventPlugin, name ) + _eventPlugin(_context, plugin, IPostEventPlugin, name) -def eventIdentifierPlugin( _context, plugin, name=None): + +def eventIdentifierPlugin(_context, plugin, name=None): if name is None: - name = '.'.join((plugin.__module__, plugin.__name__)) - utility(_context, name=name, factory=plugin, provides=IEventIdentifierPlugin) + name = ".".join((plugin.__module__, plugin.__name__)) + utility( + _context, name=name, factory=plugin, provides=IEventIdentifierPlugin + ) + + +def _eventPlugin(_context, plugin, pluginInterface, name=None): + if name is None: + name = ".".join((plugin.__module__, plugin.__name__)) + utility(_context, name=name, factory=plugin, provides=pluginInterface) diff --git a/Products/ZenEvents/metadirectives.py b/Products/ZenEvents/metadirectives.py index 71440b099b..5474bc01ce 100644 --- a/Products/ZenEvents/metadirectives.py +++ b/Products/ZenEvents/metadirectives.py @@ -1,26 +1,30 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## +import six -from zope.interface import Interface from zope.configuration.fields import GlobalObject +from zope.interface import Interface from zope.schema import TextLine + class IEventPluginDirective(Interface): """ Registers an event plugin as a named utility. """ + plugin = GlobalObject( - title=u"Plugin", - description=u"The class of the plugin to register" + title=six.u("Plugin"), + description=six.u("The class of the plugin to register"), ) + class IPreEventPluginDirective(IEventPluginDirective): """ Registers an event plugin as a named utility. @@ -32,12 +36,16 @@ class IPostEventPluginDirective(IEventPluginDirective): Registers an event plugin as a named utility. 
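The directive handlers in metaconfigure.py below all reduce to registering the plugin as a named utility for its plugin interface, defaulting the name to the dotted "module.class" path. A rough stand-alone illustration of that effect; the example interface and plugin class (including its apply() hook) are invented stand-ins for the real ones in Products.ZenEvents.interfaces:

from zope.component import getGlobalSiteManager, getUtilitiesFor
from zope.interface import Interface, implementer

class IPreEventPlugin(Interface):
    """Stand-in for Products.ZenEvents.interfaces.IPreEventPlugin."""

@implementer(IPreEventPlugin)
class ExamplePreEventPlugin(object):
    def apply(self, event, dmd):  # hypothetical plugin hook
        event["transformedBy"] = "ExamplePreEventPlugin"

# Roughly what the preEventPlugin directive ends up doing: a named utility
# registration keyed by "<module>.<class>" when no explicit name is given.
name = ".".join(
    (ExamplePreEventPlugin.__module__, ExamplePreEventPlugin.__name__)
)
getGlobalSiteManager().registerUtility(
    ExamplePreEventPlugin(), IPreEventPlugin, name=name
)

# Consumers can then enumerate every registered pre-event plugin by interface.
plugins = [plugin for _, plugin in getUtilitiesFor(IPreEventPlugin)]
assert len(plugins) == 1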
""" + class IEventIdentifierPluginDirective(IEventPluginDirective): """ Registers an event identifier plugin as a named utility """ + name = TextLine( - title=u"Name", - description=u"The name of the event identifier plugin to register", + title=six.u("Name"), + description=six.u( + "The name of the event identifier plugin to register" + ), required=False, ) diff --git a/Products/ZenEvents/skins/zenevents/zepConfig.pt b/Products/ZenEvents/skins/zenevents/zepConfig.pt index 1f6f3f8334..2b681dd0b1 100644 --- a/Products/ZenEvents/skins/zenevents/zepConfig.pt +++ b/Products/ZenEvents/skins/zenevents/zepConfig.pt @@ -11,7 +11,7 @@ } + href="/++resource++zenui/css/xtheme-zenoss.css" /> diff --git a/Products/ZenEvents/syslog_h.py b/Products/ZenEvents/syslog_h.py deleted file mode 100644 index 4dd1a7973f..0000000000 --- a/Products/ZenEvents/syslog_h.py +++ /dev/null @@ -1,70 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -# constants from syslog.h -LOG_EMERGENCY = 0 -LOG_ALERT = 1 -LOG_CRITICAL = 2 -LOG_ERRROR = 3 -LOG_WARNING = 4 -LOG_NOTICE = 5 -LOG_INFO = 6 -LOG_DEBUG = 7 - -LOG_PRIMASK = 0x07 - -def LOG_PRI(p): return p & LOG_PRIMASK -def LOG_MAKEPRI(fac, pri): return fac << 3 | pri - -LOG_KERN = 0 << 3 -LOG_USER = 1 << 3 -LOG_MAIL = 2 << 3 -LOG_DAEMON = 3 << 3 -LOG_AUTH = 4 << 3 -LOG_SYSLOG = 5 << 3 -LOG_LPR = 6 << 3 -LOG_NEWS = 7 << 3 -LOG_UUCP = 8 << 3 -LOG_CRON = 9 << 3 -LOG_AUTHPRIV = 10 << 3 -LOG_FTP = 11 << 3 -LOG_LOCAL0 = 16 << 3 -LOG_LOCAL1 = 17 << 3 -LOG_LOCAL2 = 18 << 3 -LOG_LOCAL3 = 19 << 3 -LOG_LOCAL4 = 20 << 3 -LOG_LOCAL5 = 21 << 3 -LOG_LOCAL6 = 22 << 3 -LOG_LOCAL7 = 23 << 3 - -LOG_NFACILITIES = 24 -LOG_FACMASK = 0x03F8 -def LOG_FAC(p): return (p & LOG_FACMASK) >> 3 - -def LOG_MASK(pri): return 1 << pri -def LOG_UPTO(pri): return (1 << pri + 1) - 1 -# end syslog.h - -def LOG_UNPACK(p): return (LOG_FAC(p), LOG_PRI(p)) - -fac_values = {} # mapping of facility constants to their values -fac_names = {} # mapping of values to names -pri_values = {} -pri_names = {} -for i, j in globals().items(): - if i[:4] == 'LOG_' and isinstance(j, int): - if j > LOG_PRIMASK or i == 'LOG_KERN': - n, v = fac_names, fac_values - else: - n, v = pri_names, pri_values - i = i[4:].lower() - v[i] = j - n[j] = i -del i, j, n, v diff --git a/Products/ZenEvents/tests/testEventMigrate.py b/Products/ZenEvents/tests/testEventMigrate.py deleted file mode 100644 index c1016e7578..0000000000 --- a/Products/ZenEvents/tests/testEventMigrate.py +++ /dev/null @@ -1,455 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2011, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. 
-# -############################################################################## - - -from Products.ZenTestCase.BaseTestCase import BaseTestCase -from zope.interface import implements -from Products.ZenEvents.events2.processing import AddDeviceContextAndTagsPipe -from Products.ZenEvents.events2.proxy import EventProxy -from Products.ZenMessaging.queuemessaging.interfaces import IQueuePublisher -from Products.ZenEvents.zeneventmigrate import ZenEventMigrate -from Products.ZenUtils.guid.interfaces import IGlobalIdentifier -from ConfigParser import ConfigParser -from datetime import datetime -from itertools import repeat -import re - -import logging -log = logging.getLogger('zen.testEventMigrate') - -#lookup -from Zope2.App import zcml -import Products.ZenossStartup -zcml.load_site() - - -class MockChannel(object): - """ - Mocks out an AMQP channel. - """ - def tx_select(self): - pass - - def tx_commit(self): - pass - -class MockPublisher(object): - """ - Mocks out an IQueuePublisher which saves published events for verification. - """ - implements(IQueuePublisher) - - def __init__(self): - self.msgs = [] - self.channel = MockChannel() - - def publish(self, exchange, routing_key, message, createQueues=None, mandatory=False): - self.msgs.append(message) - -class MockCursor(object): - """ - Mocks out a SQL cursor object. - """ - def __init__(self, conn): - self.conn = conn - self.next_result = None - - def execute(self, sql, args=None): - self.next_result = self.conn.resultForQuery(sql, args) - - def fetchall(self): - return self.next_result - - def fetchone(self): - return self.next_result[0] - - def close(self): - pass - -class MockConnection(object): - """ - Mocks out a SQL connection. - """ - def __init__(self, queries): - self.queries = queries - - def cursor(self): - return MockCursor(self) - - def resultForQuery(self, sql, args=None): - for query, result in self.queries.iteritems(): - if re.search(query, sql): - try: - return result.next() - except StopIteration: - return [] - raise Exception('Unsupported query: %s' % sql) - -class testEventMigrate(BaseTestCase): - - def afterSetUp(self): - super(testEventMigrate, self).afterSetUp() - - self.zeneventmigrate = ZenEventMigrate(app=self.app, connect=True) - - # Initialize config - self.zeneventmigrate.config = ConfigParser() - self.zeneventmigrate.config.add_section(self.zeneventmigrate.config_section) - - # Don't save state to disk - self.zeneventmigrate._storeConfig = lambda *x: None - - # Don't show progress messages - self.zeneventmigrate._progress = lambda *x: None - - def testMigrateSameDeviceClass(self): - """ - Tests that an event sent when a device belongs to a new device class is tagged with the original device class - from the migrated event. 
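For reference, the constants removed with syslog_h.py above implement the standard syslog.h encoding, in which a PRI value packs facility << 3 | severity. A short worked example of that bit math:

LOG_PRIMASK = 0x07
LOG_FACMASK = 0x03F8

def log_pri(p):
    # severity: low 3 bits of the PRI value
    return p & LOG_PRIMASK

def log_fac(p):
    # facility number: remaining bits, shifted down
    return (p & LOG_FACMASK) >> 3

# local4.notice, i.e. the "<165>" at the start of a syslog packet
pri = (20 << 3) | 5
assert (log_fac(pri), log_pri(pri)) == (20, 5)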
- """ - devices = self.dmd.Devices - - original = devices.createOrganizer("/Server/Solaris") - original_guid = IGlobalIdentifier(original).getGUID() - - updated = devices.createOrganizer("/Server/SSH/Solaris") - updated_guid = IGlobalIdentifier(updated).getGUID() - - updated.createInstance('test-solaris10.zenoss.loc') - - evt = { - 'dedupid': "test-solaris10.zenoss.loc|SUNWxscreensaver-hacks|/Change/Set||2|calling function " - "'setProductKey' with 'SUNWxscreensaver-hacks' on object SUNWxscreensaver-hacks", - 'evid': "0002aaaf-e10f-4348-a7b8-ae12573e560a", - 'device': "test-solaris10.zenoss.loc", - 'component': "SUNWxscreensaver-hacks", - 'eventClass': "/Change/Set", - 'eventKey': "", - 'summary': "calling function 'setProductKey' with 'SUNWxscreensaver-hacks' on object SUNWxscreensaver-hacks", - 'message': "calling function 'setProductKey' with 'SUNWxscreensaver-hacks' on object SUNWxscreensaver-hacks", - 'severity': 2, - 'eventState': 0, - 'eventClassKey': "", - 'eventGroup': "", - 'stateChange': datetime(2011, 6, 8, 13, 24, 20), - 'firstTime': 1307557460.044, - 'lastTime': 1307557460.044, - 'count': 1, - 'prodState': 1000, - 'suppid': '', - 'manager': '', - 'agent': 'ApplyDataMap', - 'DeviceClass': '/Server/Solaris', - 'Location': '', - 'Systems': '|', - 'DeviceGroups': '|', - 'ipAddress': '10.175.211.23', - 'facility': 'unknown', - 'priority': -1, - 'ntevid': 0, - 'ownerid': '', - 'deletedTime': datetime(2011, 6, 8, 13, 24, 20), - 'clearid': None, - 'DevicePriority': 3, - 'eventClassMapping': '', - 'monitor': '', - } - - events = [evt] - queries = { - r'^SELECT COUNT\(\*\) AS num_rows FROM status': repeat([{ 'num_rows': len(events) }]), - r'^SELECT \* FROM status': [events].__iter__(), - r'^SELECT evid, name, value FROM detail': repeat([]), - r'^SELECT \* FROM log WHERE evid IN': repeat([]), - } - conn = MockConnection(queries) - mock_publisher = MockPublisher() - self.zeneventmigrate._migrate_events(conn, mock_publisher, True) - self.assertEquals(1, len(mock_publisher.msgs)) - event_summary = mock_publisher.msgs[0] - event_occurrence = event_summary.occurrence[0] - for d in event_occurrence.details: - if d.name == EventProxy.DEVICE_CLASS_DETAIL_KEY: - self.assertEquals([original.getOrganizerName()], d.value) - - device_class_tags = set() - for t in event_occurrence.tags: - if t.type == AddDeviceContextAndTagsPipe.DEVICE_DEVICECLASS_TAG_KEY: - device_class_tags.update(t.uuid) - - self.assertTrue(original_guid in device_class_tags, msg="Event wasn't tagged with original device class") - self.assertFalse(updated_guid in device_class_tags, msg="Event was tagged with new device class") - - def testMigrateSameLocation(self): - """ - Tests that an event sent when a device belongs to a new location is tagged with the original location - from the migrated event. - """ - devices = self.dmd.Devices - locations = self.dmd.Locations - - original = locations.createOrganizer("/Austin") - original_guid = IGlobalIdentifier(original).getGUID() - - updated = locations.createOrganizer("/Annapolis") - updated_guid = IGlobalIdentifier(updated).getGUID() - - device_class = devices.createOrganizer("/Server/Windows/WMI/Active Directory/2008") - device = device_class.createInstance('test-win2008-ad.zenoss.loc') - device.setLocation(updated.getOrganizerName()) - - evt = { - 'dedupid': "test-win2008-ad.zenoss.loc|zeneventlog|/Status/Wmi||4|\n Could not read the Windows" - " event log (ExecNotificationQuery on test-win2008-ad.zenoss.loc (DOS code 0x800700a4)). 
C", - 'evid': "00049aee-b0bc-4621-8393-9b0cf831afc4", - 'device': "test-win2008-ad.zenoss.loc", - 'component': "zeneventlog", - 'eventClass': "/Status/Wmi", - 'eventKey': "", - 'summary': "Could not read the Windows event log (ExecNotificationQuery on test-win2008-ad.zenoss.loc (DOS" - " code 0x800700a4)). C", - 'message': "Could not read the Windows event log (ExecNotificationQuery on test-win2008-ad.zenoss.loc (DOS" - " code 0x800700a4)). Check your username/password settings and verify network connectivity.", - 'severity': 4, - 'eventState': 0, - 'eventClassKey': "", - 'eventGroup': "", - 'stateChange': datetime(2011, 6, 9, 22, 39, 48), - 'firstTime': 1307677188.839, - 'lastTime': 1307677188.839, - 'count': 1, - 'prodState': 1000, - 'suppid': '', - 'manager': 'pwarren-dev.zenoss.loc', - 'agent': 'zeneventlog', - 'DeviceClass': '/Server/Windows/WMI/Active Directory/2008', - 'Location': '/Austin', - 'Systems': '|', - 'DeviceGroups': '|', - 'ipAddress': '10.175.211.197', - 'facility': 'unknown', - 'priority': -1, - 'ntevid': 0, - 'ownerid': '', - 'deletedTime': datetime(2011, 6, 9, 22, 39, 48), - 'clearid': '947d299f-cc25-4250-a8de-b8fd8bc2b06d', - 'DevicePriority': 3, - 'eventClassMapping': '', - 'monitor': 'localhost', - } - - events = [evt] - queries = { - r'^SELECT COUNT\(\*\) AS num_rows FROM status': repeat([{ 'num_rows': len(events) }]), - r'^SELECT \* FROM status': [events].__iter__(), - r'^SELECT evid, name, value FROM detail': repeat([]), - r'^SELECT \* FROM log WHERE evid IN': repeat([]), - } - conn = MockConnection(queries) - mock_publisher = MockPublisher() - self.zeneventmigrate._migrate_events(conn, mock_publisher, True) - self.assertEquals(1, len(mock_publisher.msgs)) - event_summary = mock_publisher.msgs[0] - event_occurrence = event_summary.occurrence[0] - for d in event_occurrence.details: - if d.name == EventProxy.DEVICE_LOCATION_DETAIL_KEY: - self.assertEquals([original.getOrganizerName()], d.value) - - device_location_tags = set() - for t in event_occurrence.tags: - if t.type == AddDeviceContextAndTagsPipe.DEVICE_LOCATION_TAG_KEY: - device_location_tags.update(t.uuid) - - self.assertTrue(original_guid in device_location_tags, msg="Event wasn't tagged with original location") - self.assertFalse(updated_guid in device_location_tags, msg="Event was tagged with new location") - - def testMigrateSameGroups(self): - """ - Tests that an event sent when a device belongs to new device groups is tagged with the original device groups - from the migrated event. 
- """ - devices = self.dmd.Devices - groups = self.dmd.Groups - - group_first = groups.createOrganizer('/First') - group_second = groups.createOrganizer('/Second') - group_third = groups.createOrganizer('/Third') - group_first_nested = groups.createOrganizer('/First/FirstNested') - - group_fourth = groups.createOrganizer('/Fourth') - group_fifth = groups.createOrganizer('/Fifth') - - device_class = devices.createOrganizer("/Server/Linux") - device = device_class.createInstance('pwarren-dev.zenoss.loc') - device.setGroups([group_fourth.getOrganizerName(), group_fifth.getOrganizerName()]) - - evt = { - 'dedupid': "pwarren-dev.zenoss.loc|snmpd|||2|Received SNMP packet(s) from UDP: [10.175.210.74]:48219", - 'evid': "0015e762-1983-40ad-a966-d2a66ee40fd9", - 'device': "pwarren-dev.zenoss.loc", - 'component': "snmpd", - 'eventClass': "/Unknown", - 'eventKey': "", - 'summary': "Received SNMP packet(s) from UDP: [10.175.210.74]:48219", - 'message': "Received SNMP packet(s) from UDP: [10.175.210.74]:48219", - 'severity': 2, - 'eventState': 0, - 'eventClassKey': "snmpd", - 'eventGroup': "syslog", - 'stateChange': datetime(2011, 6, 13, 3, 10, 13), - 'firstTime': 1307952609.997, - 'lastTime': 1307952609.997, - 'count': 1, - 'prodState': 1000, - 'suppid': '', - 'manager': 'pwarren-dev.zenoss.loc', - 'agent': 'zensyslog', - 'DeviceClass': '/Server/Linux', - 'Location': '/Austin', - 'Systems': '|/Production|/Development', - 'DeviceGroups': '|/First|/Second|/Third|/First/FirstNested', - 'ipAddress': '10.175.210.74', - 'facility': 'nfacilit', - 'priority': 6, - 'ntevid': 0, - 'ownerid': '', - 'deletedTime': datetime(2011, 6, 13, 7, 11, 8), - 'clearid': None, - 'DevicePriority': 3, - 'eventClassMapping': '', - 'monitor': 'localhost', - } - - events = [evt] - queries = { - r'^SELECT COUNT\(\*\) AS num_rows FROM status': repeat([{ 'num_rows': len(events) }]), - r'^SELECT \* FROM status': [events].__iter__(), - r'^SELECT evid, name, value FROM detail': repeat([]), - r'^SELECT \* FROM log WHERE evid IN': repeat([]), - } - conn = MockConnection(queries) - mock_publisher = MockPublisher() - self.zeneventmigrate._migrate_events(conn, mock_publisher, True) - self.assertEquals(1, len(mock_publisher.msgs)) - event_summary = mock_publisher.msgs[0] - event_occurrence = event_summary.occurrence[0] - - expected_group_names = set([g.getOrganizerName() for g in [group_first, group_second, group_third, - group_first_nested]]) - found_group_names = set() - - for d in event_occurrence.details: - if d.name == EventProxy.DEVICE_GROUPS_DETAIL_KEY: - found_group_names.update(d.value) - diff_names = expected_group_names - found_group_names - self.assertEquals(0, len(diff_names)) - - expected_group_tags = set([IGlobalIdentifier(g).getGUID() for g in [group_first, group_second, group_third, - group_first_nested]]) - found_group_tags = set() - for t in event_occurrence.tags: - if t.type == AddDeviceContextAndTagsPipe.DEVICE_GROUPS_TAG_KEY: - found_group_tags.update(t.uuid) - - diff_tags = expected_group_tags - found_group_tags - self.assertEquals(0, len(diff_tags)) - - def testMigrateSameSystems(self): - """ - Tests that an event sent when a device belongs to new systems is tagged with the original systems - from the migrated event. 
- """ - devices = self.dmd.Devices - groups = self.dmd.Systems - - system_production = groups.createOrganizer('/Production') - system_development = groups.createOrganizer('/Development') - - system_additional = groups.createOrganizer('/Additional') - system_preprod = groups.createOrganizer('/PreProduction') - - device_class = devices.createOrganizer("/Server/Linux") - device = device_class.createInstance('pwarren-dev.zenoss.loc') - device.setSystems([system_additional.getOrganizerName(), system_preprod.getOrganizerName()]) - - evt = { - 'dedupid': "pwarren-dev.zenoss.loc|snmpd|||2|Received SNMP packet(s) from UDP: [10.175.210.74]:48219", - 'evid': "0015e762-1983-40ad-a966-d2a66ee40fd9", - 'device': "pwarren-dev.zenoss.loc", - 'component': "snmpd", - 'eventClass': "/Unknown", - 'eventKey': "", - 'summary': "Received SNMP packet(s) from UDP: [10.175.210.74]:48219", - 'message': "Received SNMP packet(s) from UDP: [10.175.210.74]:48219", - 'severity': 2, - 'eventState': 0, - 'eventClassKey': "snmpd", - 'eventGroup': "syslog", - 'stateChange': datetime(2011, 6, 13, 3, 10, 13), - 'firstTime': 1307952609.997, - 'lastTime': 1307952609.997, - 'count': 1, - 'prodState': 1000, - 'suppid': '', - 'manager': 'pwarren-dev.zenoss.loc', - 'agent': 'zensyslog', - 'DeviceClass': '/Server/Linux', - 'Location': '/Austin', - 'Systems': '|/Production|/Development', - 'DeviceGroups': '|/First|/Second|/Third|/First/FirstNested', - 'ipAddress': '10.175.210.74', - 'facility': 'nfacilit', - 'priority': 6, - 'ntevid': 0, - 'ownerid': '', - 'deletedTime': datetime(2011, 6, 13, 7, 11, 8), - 'clearid': None, - 'DevicePriority': 3, - 'eventClassMapping': '', - 'monitor': 'localhost', - } - - events = [evt] - queries = { - r'^SELECT COUNT\(\*\) AS num_rows FROM status': repeat([{ 'num_rows': len(events) }]), - r'^SELECT \* FROM status': [events].__iter__(), - r'^SELECT evid, name, value FROM detail': repeat([]), - r'^SELECT \* FROM log WHERE evid IN': repeat([]), - } - conn = MockConnection(queries) - mock_publisher = MockPublisher() - self.zeneventmigrate._migrate_events(conn, mock_publisher, True) - self.assertEquals(1, len(mock_publisher.msgs)) - event_summary = mock_publisher.msgs[0] - event_occurrence = event_summary.occurrence[0] - - expected_system_names = set([s.getOrganizerName() for s in [system_development, system_production]]) - found_system_names = set() - - for d in event_occurrence.details: - if d.name == EventProxy.DEVICE_SYSTEMS_DETAIL_KEY: - found_system_names.update(d.value) - diff_names = expected_system_names - found_system_names - self.assertEquals(0, len(diff_names)) - - expected_system_tags = set([IGlobalIdentifier(s).getGUID() for s in [system_development, system_production]]) - found_system_tags = set() - for t in event_occurrence.tags: - if t.type == AddDeviceContextAndTagsPipe.DEVICE_SYSTEMS_TAG_KEY: - found_system_tags.update(t.uuid) - - diff_tags = expected_system_tags - found_system_tags - self.assertEquals(0, len(diff_tags)) - - -def test_suite(): - from unittest import TestSuite, makeSuite - suite = TestSuite() - suite.addTest(makeSuite(testEventMigrate)) - return suite diff --git a/Products/ZenEvents/tests/testSyslogProcessing.py b/Products/ZenEvents/tests/testSyslogProcessing.py deleted file mode 100644 index 5c2ddf501a..0000000000 --- a/Products/ZenEvents/tests/testSyslogProcessing.py +++ /dev/null @@ -1,139 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2008, all rights reserved. 
-# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -from Products.ZenEvents.SyslogProcessing import SyslogProcessor -from Products.ZenTestCase.BaseTestCase import BaseTestCase - -class SyslogProcessingTest(BaseTestCase): - - def sendEvent(self, evt): - "Fakeout sendEvent() method" - self.sent = evt - - def testBuildEventClassKey(self): - "Simple, brain-dead testing of SyslogProcessor" - base = dict(device='localhost', component='component', severity=3) - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - self.assert_(s.buildEventClassKey({}) == {}) - evt = dict(eventClassKey='akey', **base) - self.assert_(s.buildEventClassKey(evt.copy()) == evt) - evt = dict(eventClassKey='akey', ntevid='1234', **base) - self.assert_(s.buildEventClassKey(evt.copy()) == evt) - evt = dict(ntevid='1234', **base) - self.assert_(s.buildEventClassKey(evt)['eventClassKey'] == - 'component_1234') - evt = dict(**base) - self.assert_(s.buildEventClassKey(evt)['eventClassKey'] == 'component') - - def testProcess(self): - long_text_message = "long text message " * 20 - msg = "2016-08-08T11:07:33.660820-04:00 devname=localhost log_id=98765434 type=component {}".format(long_text_message) - ipaddr = "127.0.0.1" - host = "8080" - rtime = "1416111" - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - s.process(msg, ipaddr, host, rtime) - evt = self.sent - self.assertEquals(evt.get('device'), host) - self.assertEquals(evt.get('ipAddress'), ipaddr) - self.assertEquals(evt.get('firstTime'), rtime) - self.assertEquals(evt.get('lastTime'), rtime) - self.assertEquals(evt.get('eventGroup'), 'syslog') - self.assertEquals(evt.get('message'), unicode(msg)) - self.assertEquals(evt.get('summary'), unicode(msg)) - - def testCheckFortigate(self): - """ - Test of Fortigate syslog message parsing - """ - msg = "date=xxxx devname=blue log_id=987654321 type=myComponent blah blah blah" - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag( {}, msg ) - - self.assertEquals( evt.get('eventClassKey'), '987654321' ) - self.assertEquals( evt.get('component'), 'myComponent' ) - self.assertEquals( evt.get('summary'), 'devname=blue log_id=987654321 type=myComponent blah blah blah' ) - - def testCheckCiscoPortStatus(self): - """ - Test of Cisco port status syslog message parsing - """ - msg = "Process 10532, Nbr 192.168.10.13 on GigabitEthernet2/15 from LOADING to FULL, Loading Done" - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag( {}, msg ) - - self.assertEquals( evt.get('device'), '192.168.10.13' ) - self.assertEquals( evt.get('process_id'), '10532' ) - self.assertEquals( evt.get('interface'), 'GigabitEthernet2/15' ) - self.assertEquals( evt.get('start_state'), 'LOADING' ) - self.assertEquals( evt.get('end_state'), 'FULL' ) - self.assertEquals( evt.get('summary'), 'Loading Done') - - def testCiscoVpnConcentrator(self): - """ - Test of Cisco VPN Concentrator syslog message parsing - """ - msg = "54884 05/25/2009 13:41:14.060 SEV=3 HTTP/42 RPT=4623 Error on socket accept." - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag( {}, msg ) - - self.assertEquals( evt.get('eventClassKey'), 'HTTP/42' ) - self.assertEquals( evt.get('summary'), 'Error on socket accept.' 
) - - def testCiscoStandardMessageSeverity(self): - """ - Test that the event severity is correctly extracted from the - Cisco standard message body - """ - msg = '2014 Jan 31 19:45:51 R2-N6K1-2010-P1 %ETH_PORT_CHANNEL-5-CREATED: port-channel1 created' - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag( {}, msg ) - self.assertEquals( evt.get('overwriteSeverity'), '5' ) - - def testDellSyslog(self): - """ - Test dell stuf - """ - msg = ("1-Oct-2009 23:00:00.383809:snapshotDelete.cc:290:INFO:8.2.5:Successfully deleted snapshot 'UNVSQLCLUSTERTEMPDB-2009-09-30-23:00:14.11563'.") - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag( {}, msg ) - - self.assertEquals( evt.get('eventClassKey'), '8.2.5' ) - self.assertEquals( evt.get('summary'), - "Successfully deleted snapshot 'UNVSQLCLUSTERTEMPDB-2009-09-30-23:00:14.11563'.") - - def testDellSyslog2(self): - """ - Test dell stuf - """ - msg = ("2626:48:VolExec:27-Aug-2009 13:15:58.072049:VE_VolSetWorker.hh:75:WARNING:43.3.2:Volume volumeName has reached 96 percent of its reported size and is currently using 492690MB.") - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag( {}, msg ) - - self.assertEquals( evt.get('eventClassKey'), '43.3.2' ) - self.assertEquals( evt.get('summary'), - "Volume volumeName has reached 96 percent of its reported size and is currently using 492690MB.") - - def testNetAppSyslogParser(self): - """ - Test NetApp syslog parser. - """ - msg = '[deviceName: 10/100/1000/e1a:warning]: Client 10.0.0.101 (xid 4251521131) is trying to access an unexported mount (fileid 64, snapid 0, generation 6111516 and flags 0x0 on volume 0xc97d89a [No volume name available])' - s = SyslogProcessor(self.sendEvent, 6, False, 'localhost', 3) - evt = s.parseTag({}, msg) - self.assertEquals(evt.get('component'), '10/100/1000/e1a') - self.assertEquals(evt.get('summary'), 'Client 10.0.0.101 (xid 4251521131) is trying to access an unexported mount (fileid 64, snapid 0, generation 6111516 and flags 0x0 on volume 0xc97d89a [No volume name available])') - - -def test_suite(): - from unittest import TestSuite, makeSuite - suite = TestSuite() - suite.addTest(makeSuite(SyslogProcessingTest)) - return suite diff --git a/Products/ZenEvents/tests/testTransforms.py b/Products/ZenEvents/tests/testTransforms.py index 63bc52e7dc..12f0090eb6 100644 --- a/Products/ZenEvents/tests/testTransforms.py +++ b/Products/ZenEvents/tests/testTransforms.py @@ -1,19 +1,26 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, 2011, 2012, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## +from mock import patch from Products.ZenTestCase.BaseTestCase import BaseTestCase from Products.ZenEvents.zeneventd import EventPipelineProcessor from Products.ZenEvents.events2.processing import DropEvent from Products.ZenEvents.events2.proxy import EventProxy -from zenoss.protocols.protobufs.zep_pb2 import Event, STATUS_CLOSED, STATUS_SUPPRESSED, SEVERITY_ERROR,\ - SEVERITY_WARNING, SEVERITY_CLEAR +from zenoss.protocols.protobufs.zep_pb2 import ( + Event, + STATUS_CLOSED, + STATUS_SUPPRESSED, + SEVERITY_ERROR, + SEVERITY_WARNING, + SEVERITY_CLEAR, +) from zenoss.protocols.protobufs.model_pb2 import DEVICE, COMPONENT from Products.ZenUtils.guid.interfaces import IGlobalIdentifier @@ -24,11 +31,15 @@ # Extract the used blocks from the event's message import re - m = re.search("threshold of [^:]+: current value ([\d\.]+)", evt.message) + m = re.search( + "threshold of [^:]+: current value ([\d\.]+)", evt.message + ) if not m: continue # Get the total blocks from the model. Adjust by specified offset. - totalBlocks = f.totalBlocks * getattr(device, "zFileSystemSizeOffset", 1.0) + totalBlocks = f.totalBlocks * getattr( + device, "zFileSystemSizeOffset", 1.0 + ) totalBytes = totalBlocks * f.blockSize usedBytes = None @@ -47,41 +58,52 @@ free = convToUnits(totalBytes - usedBytes) # Make a nicer summary - evt.summary = "disk space threshold: %3.1f%% used (%s free)" % (p, free) + evt.summary = ( + "disk space threshold: %3.1f%% used (%s free)" + ) % (p, free) evt.message = evt.summary break """ -class testTransforms(BaseTestCase): +PATH = {"src": "Products.ZenEvents.zeneventd"} +class TestTransforms(BaseTestCase): def afterSetUp(self): - super(testTransforms, self).afterSetUp() + super(TestTransforms, self).afterSetUp() class MockConnection(object): def sync(self): pass + self.dmd._p_jar = MockConnection() - self.dmd.Events.createOrganizer('/Perf/Filesystem') + self.dmd.Events.createOrganizer("/Perf/Filesystem") self.dmd.Events.Perf.Filesystem.transform = perfFilesystemTransform + self.MetricReporter_patcher = patch( + "{src}.MetricReporter".format(**PATH) + ) + self.MetricReporter_mock = self.MetricReporter_patcher.start() + self.addCleanup(self.MetricReporter_patcher.stop) + self.processor = EventPipelineProcessor(self.dmd) self.processor.reporter.stop() def _processEvent(self, event): - # Don't return a sub-message from a C++ protobuf class - can crash as the parent is GC'd + # Don't return a sub-message from a C++ protobuf class - can + # crash as the parent is GC'd. return self.processor.processMessage(event) - + def testPerfFileSystemTransformPerfFS(self): """ Test to make sure that the standard transform on the /Perf/Filesystem event class works properly for stock performance templates. """ - + # Test an example event from a standard SNMP device. 
- device = self.dmd.Devices.createInstance('snmpdevice') - device.os.addFileSystem('/', False) + device = self.dmd.Devices.createInstance("snmpdevice") + device.os.addFileSystem("/", False) fs = device.os.filesystems()[0] - fs.mount = '/' + fs.mount = "/" fs.blockSize = 4096 fs.totalBlocks = 29221228 @@ -91,19 +113,24 @@ def testPerfFileSystemTransformPerfFS(self): event.actor.element_sub_identifier = fs.name() event.actor.element_sub_type_id = COMPONENT event.severity = SEVERITY_WARNING - event.event_key = 'usedBlocks_usedBlocks|high disk usage' - event.event_class = '/Perf/Filesystem' - event.summary = 'threshold of high disk usage exceeded: current value 23476882.00' + event.event_key = "usedBlocks_usedBlocks|high disk usage" + event.event_class = "/Perf/Filesystem" + event.summary = ( + "threshold of high disk usage exceeded: current value 23476882.00" + ) processed = self._processEvent(event) - self.assertEquals(processed.event.summary, 'disk space threshold: 80.3% used (21.9GB free)') - + self.assertEquals( + processed.event.summary, + "disk space threshold: 80.3% used (21.9GB free)", + ) + def testPerfFileSystemTransformPerfmon(self): # Test an example event from a standard Perfmon device. - device = self.dmd.Devices.createInstance('perfmondevice') - device.os.addFileSystem('C', False) + device = self.dmd.Devices.createInstance("perfmondevice") + device.os.addFileSystem("C", False) fs = device.os.filesystems()[0] - fs.mount = ' Label:C: Serial Number: 1471843B' + fs.mount = " Label:C: Serial Number: 1471843B" fs.blockSize = 8192 fs.totalBlocks = 1047233 @@ -113,19 +140,24 @@ def testPerfFileSystemTransformPerfmon(self): event.actor.element_sub_identifier = fs.name() event.actor.element_sub_type_id = COMPONENT event.severity = SEVERITY_WARNING - event.event_key = 'FreeMegabytes_FreeMegabytes' - event.event_class = '/Perf/Filesystem' - event.summary = 'threshold of low disk space not met: current value 4156.00' - + event.event_key = "FreeMegabytes_FreeMegabytes" + event.event_class = "/Perf/Filesystem" + event.summary = ( + "threshold of low disk space not met: current value 4156.00" + ) + processed = self._processEvent(event) - self.assertEquals(processed.event.summary, 'disk space threshold: 49.2% used (4.1GB free)') - + self.assertEquals( + processed.event.summary, + "disk space threshold: 49.2% used (4.1GB free)", + ) + def testPerfFileSystemTransformSSH(self): # Test an example event from a standard SSH device. 
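The expected summary in the SNMP filesystem test above follows from the arithmetic in the perfFilesystemTransform snippet quoted earlier: percent used comes from used/total blocks, free space from the block size. Checking the numbers by hand (plain arithmetic, not the event pipeline itself):

# usedBlocks comes from the threshold event's "current value"; block counts
# and size come from the modeled filesystem in the test above.
block_size, total_blocks, used_blocks = 4096, 29221228, 23476882
free_bytes = (total_blocks - used_blocks) * block_size
pct_used = 100.0 * used_blocks / total_blocks
print("disk space threshold: %3.1f%% used (%.1fGB free)"
      % (pct_used, free_bytes / 1024.0 ** 3))
# -> disk space threshold: 80.3% used (21.9GB free)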
- device = self.dmd.Devices.createInstance('sshdevice') - device.os.addFileSystem('/', False) + device = self.dmd.Devices.createInstance("sshdevice") + device.os.addFileSystem("/", False) fs = device.os.filesystems()[0] - fs.mount = '/' + fs.mount = "/" fs.blockSize = 1024 fs.totalBlocks = 149496116 @@ -135,27 +167,32 @@ def testPerfFileSystemTransformSSH(self): event.actor.element_sub_identifier = fs.id event.actor.element_sub_type_id = COMPONENT event.severity = SEVERITY_WARNING - event.event_key = 'disk|disk_usedBlocks|Free Space 90 Percent' - event.event_class = '/Perf/Filesystem' - event.summary = 'threshold of Free Space 90 Percent exceeded: current value 73400348.00' + event.event_key = "disk|disk_usedBlocks|Free Space 90 Percent" + event.event_class = "/Perf/Filesystem" + event.summary = ( + "threshold of Free Space 90 Percent exceeded: " + "current value 73400348.00" + ) processed = self._processEvent(event) - self.assertEquals(processed.event.summary, 'disk space threshold: 49.1% used (72.6GB free)') - + self.assertEquals( + processed.event.summary, + "disk space threshold: 49.1% used (72.6GB free)", + ) + def testActorReidentificationFromEventClassKeyTransform(self): """ - Verify that changing the device in a transform properly reidentifies the device - when matching an event by eventClassKey. + Verify that changing the device in a transform properly reidentifies + the device when matching an event by eventClassKey. """ - device_a = self.dmd.Devices.createInstance("transform_device_a") # Related: ZEN-1419 # If you change a device from within a transform like so: - # + # # evt.device = 'my_new_device' # - # The processing pipeline will recognize this and re-run the + # The processing pipeline will recognize this and re-run the # identification pipes. Before it re-runs these pipes though, it will # clear several properties related to the device, one of which is the # device/element UUID. During the Identification pipe, if the UUID @@ -167,11 +204,11 @@ def testActorReidentificationFromEventClassKeyTransform(self): device_b = self.dmd.Devices.createInstance("transform_device_b") - _transform_key = 'transform_test_key' + _transform_key = "transform_test_key" _transform = """ evt.device = '%s' """ - self.dmd.Events.createOrganizer('/transform_test') + self.dmd.Events.createOrganizer("/transform_test") self.dmd.Events.transform_test.transform = _transform % device_b.id # the organizer above contains the transform, no create an instance @@ -182,7 +219,7 @@ def testActorReidentificationFromEventClassKeyTransform(self): event.actor.element_identifier = device_a.id event.actor.element_type_id = DEVICE event.severity = SEVERITY_WARNING - event.summary = 'Testing transforms.' + event.summary = "Testing transforms." 
detail = event.details.add() detail.name = EventProxy.DEVICE_IP_ADDRESS_DETAIL_KEY @@ -192,10 +229,14 @@ def testActorReidentificationFromEventClassKeyTransform(self): event.event_class_key = _transform_key processed = self._processEvent(event) - self.assertEquals(device_b.id, processed.event.actor.element_identifier) - self.assertEquals(IGlobalIdentifier(device_b).getGUID(), - processed.event.actor.element_uuid) - + self.assertEquals( + device_b.id, processed.event.actor.element_identifier + ) + self.assertEquals( + IGlobalIdentifier(device_b).getGUID(), + processed.event.actor.element_uuid, + ) + def testActorReidentificationFromEventClassKeyTransformWithComponent(self): """ Verify that changing the device in a transform properly reidentifies @@ -210,16 +251,18 @@ def testActorReidentificationFromEventClassKeyTransformWithComponent(self): devB.os.addFileSystem("component", False) devB.setManageIp("192.168.100.101") - _transform_key = 'transform_test_key' - self.dmd.Events.createOrganizer('/transform_test') - self.dmd.Events.transform_test.transform = "evt.device = '%s'" % devB.id + _transform_key = "transform_test_key" + self.dmd.Events.createOrganizer("/transform_test") + self.dmd.Events.transform_test.transform = ( + "evt.device = '%s'" % devB.id + ) self.dmd.Events.transform_test.createInstance(_transform_key) event = Event() event.actor.element_identifier = devA.id event.actor.element_type_id = DEVICE event.severity = SEVERITY_WARNING - event.summary = 'Testing transforms on component.' + event.summary = "Testing transforms on component." event.actor.element_sub_type_id = COMPONENT event.actor.element_sub_identifier = devA.getDeviceComponents()[0].id @@ -230,81 +273,85 @@ def testActorReidentificationFromEventClassKeyTransformWithComponent(self): # Match the transform by event_class_key event.event_class_key = _transform_key processed = self._processEvent(event) - self.assertEquals(IGlobalIdentifier(devB.getDeviceComponents()[0]).getGUID(), - processed.event.actor.element_sub_uuid) - + self.assertEquals( + IGlobalIdentifier(devB.getDeviceComponents()[0]).getGUID(), + processed.event.actor.element_sub_uuid, + ) + def testIntSeverityTransform(self): """ Transform the event severity to a string and see if it evaluates. 
""" transform = 'evt.severity="0"; evt.summary="transformed"' - self.dmd.Events.createOrganizer('/Perf/Filesystem') + self.dmd.Events.createOrganizer("/Perf/Filesystem") self.dmd.Events.Perf.Filesystem.transform = transform event = Event() - event.actor.element_identifier = 'localhost' + event.actor.element_identifier = "localhost" event.actor.element_type_id = DEVICE event.severity = SEVERITY_ERROR - event.event_class = '/Perf/Filesystem' - event.summary = 'bad thingy' + event.event_class = "/Perf/Filesystem" + event.summary = "bad thingy" processed = self._processEvent(event) self.assertEqual(SEVERITY_CLEAR, processed.event.severity) - self.assertEqual('transformed', processed.event.summary) + self.assertEqual("transformed", processed.event.summary) self.assert_(isinstance(processed.event.severity, int)) def testActionDropped(self): transform = 'evt._action="drop"' - self.dmd.Events.createOrganizer('/Perf/Filesystem') + self.dmd.Events.createOrganizer("/Perf/Filesystem") self.dmd.Events.Perf.Filesystem.transform = transform event = Event() - event.actor.element_identifier = 'localhost' + event.actor.element_identifier = "localhost" event.actor.element_type_id = DEVICE event.severity = SEVERITY_ERROR - event.event_class = '/Perf/Filesystem' - event.summary = 'should be dropped' + event.event_class = "/Perf/Filesystem" + event.summary = "should be dropped" self.assertRaises(DropEvent, self._processEvent, event) def testActionHistory(self): transform = 'evt._action="history"' - self.dmd.Events.createOrganizer('/Perf/Filesystem') + self.dmd.Events.createOrganizer("/Perf/Filesystem") self.dmd.Events.Perf.Filesystem.transform = transform event = Event() - event.actor.element_identifier = 'localhost' + event.actor.element_identifier = "localhost" event.actor.element_type_id = DEVICE event.severity = SEVERITY_ERROR - event.event_class = '/Perf/Filesystem' - event.summary = 'should be closed' + event.event_class = "/Perf/Filesystem" + event.summary = "should be closed" processed = self._processEvent(event) self.assertEqual(STATUS_CLOSED, processed.event.status) def testActionStatusDoesntChangeSuppressed(self): """ - If an event comes in as suppressed and the _action says to keep it in _status (the default), - make sure that we don't accidentally change the status of the event back to STATUS_NEW. + If an event comes in as suppressed and the _action says to keep it in + _status (the default), make sure that we don't accidentally change + the status of the event back to STATUS_NEW. 
""" transform = 'evt._action="status"' - self.dmd.Events.createOrganizer('/Perf/Filesystem') + self.dmd.Events.createOrganizer("/Perf/Filesystem") self.dmd.Events.Perf.Filesystem.transform = transform event = Event() - event.actor.element_identifier = 'localhost' + event.actor.element_identifier = "localhost" event.actor.element_type_id = DEVICE event.severity = SEVERITY_ERROR event.status = STATUS_SUPPRESSED - event.event_class = '/Perf/Filesystem' - event.summary = 'should be suppressed' + event.event_class = "/Perf/Filesystem" + event.summary = "should be suppressed" processed = self._processEvent(event) self.assertEqual(STATUS_SUPPRESSED, processed.event.status) -def test_suite(): - from unittest import TestSuite, makeSuite - suite = TestSuite() - suite.addTest(makeSuite(testTransforms)) - return suite +# def test_suite(): +# from unittest import TestSuite, makeSuite +# +# suite = TestSuite() +# suite.addTest(makeSuite(testTransforms)) +# return suite diff --git a/Products/ZenEvents/tests/testTrapFilter.py b/Products/ZenEvents/tests/testTrapFilter.py deleted file mode 100644 index f224de8bcd..0000000000 --- a/Products/ZenEvents/tests/testTrapFilter.py +++ /dev/null @@ -1,1091 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2015, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -#runtests -v -t unit Products.ZenEvents -m testZentrap - -from Products.ZenEvents.TrapFilter import BaseFilterDefinition -from Products.ZenEvents.TrapFilter import OIDBasedFilterDefinition -from Products.ZenEvents.TrapFilter import GenericTrapFilterDefinition -from Products.ZenEvents.TrapFilter import V1FilterDefinition -from Products.ZenEvents.TrapFilter import V2FilterDefinition -from Products.ZenEvents.TrapFilter import TrapFilter -from Products.ZenHub.interfaces import \ - TRANSFORM_CONTINUE, \ - TRANSFORM_DROP -from Products.ZenTestCase.BaseTestCase import BaseTestCase - - -class OIDBasedFilterDefinitionTest(BaseTestCase): - def testEQByOID(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - self.assert_(base1 == base2) - - def testEQByOIDFails(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = OIDBasedFilterDefinition(0, "include", "5.4.3.2.1") - self.assert_(base1 != base2) - - def testEQByOIDIgnoresAction(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = OIDBasedFilterDefinition(0, "exclude", "1.2.3.4.5") - self.assert_(base1 == base2) - - def testEQByOIDFailsForDifferentClass(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = BaseFilterDefinition(0, "include") - self.assert_(base1 != base2) - - def testHash(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - self.assert_(base1.__hash__() == base2.__hash__()) - - def testHashFails(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = OIDBasedFilterDefinition(0, "include", "5.4.3.2.1") - self.assert_(base1.__hash__() != base2.__hash__()) - - def testHashIgnoresAction(self): - base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") - base2 = OIDBasedFilterDefinition(0, "exclude", "1.2.3.4.5") - 
self.assert_(base1.__hash__() == base2.__hash__()) - -class GenericTrapFilterDefinitionTest(BaseTestCase): - def testEQByOID(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = GenericTrapFilterDefinition(0, "include", "1") - self.assert_(base1 == base2) - - def testEQByOIDFails(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = GenericTrapFilterDefinition(0, "include", "5") - self.assert_(base1 != base2) - - def testEQByOIDIgnoresAction(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = GenericTrapFilterDefinition(0, "exclude", "1") - self.assert_(base1 == base2) - - def testEQByOIDFailsForDifferentClass(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = BaseFilterDefinition(0, "include") - self.assert_(base1 != base2) - - def testHash(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = GenericTrapFilterDefinition(0, "include", "1") - self.assertEquals(base1.__hash__(), base2.__hash__()) - - def testHashFails(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = GenericTrapFilterDefinition(0, "include", "2") - self.assertNotEquals(base1.__hash__(), base2.__hash__()) - - def testHashIgnoresAction(self): - base1 = GenericTrapFilterDefinition(0, "include", "1") - base2 = GenericTrapFilterDefinition(0, "exclude", "1") - self.assert_(base1.__hash__() == base2.__hash__()) - - -class TrapFilterTest(BaseTestCase): - def testValidateOIDForGlob(self): - filter = TrapFilter() - results = filter._validateOID("*") - self.assertEquals(results, None) - - results = filter._validateOID("1.2.*") - self.assertEquals(results, None) - - def testValidateOIDFailsForEmptyString(self): - filter = TrapFilter() - results = filter._validateOID("") - self.assertEquals(results, "Empty OID is invalid") - - def testValidateOIDFailsForSimpleNumber(self): - filter = TrapFilter() - results = filter._validateOID("123") - self.assertEquals(results, "At least one '.' required") - - def testValidateOIDFailsForInvalidChars(self): - filter = TrapFilter() - results = filter._validateOID("1.2.3-5.*") - self.assertEquals(results, "Invalid character found; only digits, '.' 
and '*' allowed") - - def testValidateOIDFailsForDoubleDots(self): - filter = TrapFilter() - results = filter._validateOID("1.2..3") - self.assertEquals(results, "Consecutive '.'s not allowed") - - def testValidateOIDFailsForInvalidGlobbing(self): - filter = TrapFilter() - results = filter._validateOID("1.2.3.*.5.*") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("1.*.5") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("1.5*") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("*.") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("*.1") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("*.*") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("5*") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID("*5") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - results = filter._validateOID(".*") - self.assertEquals(results, "When using '*', only a single '*' at the end of OID is allowed") - - def testParseFilterDefinitionForEmptyLine(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("", 99) - self.assertEquals(results, "Incomplete filter definition") - - def testParseFilterDefinitionForIncompleteLine(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("a b", 99) - self.assertEquals(results, "Incomplete filter definition") - - def testParseFilterDefinitionForInvalidAction(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("invalid V1 ignored", 99) - self.assertEquals(results, "Invalid action 'invalid'; the only valid actions are 'include' or 'exclude'") - - def testParseFilterDefinitionForInvalidVersion(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("include V3 ignored", 99) - self.assertEquals(results, "Invalid SNMP version 'V3'; the only valid versions are 'v1' or 'v2'") - - def testParseFilterDefinitionForInvalidV1Definition(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("include V1 .", 99) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - def testParseFilterDefinitionForCaseInsensitiveDefinition(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("InClude v1 3", 99) - self.assertEquals(results, None) - - def testParseFilterDefinitionForValidV1Definition(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("include V1 3", 99) - self.assertEquals(results, None) - - def testParseFilterDefinitionForInvalidV2Definition(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("include V2 .", 99) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - def testParseFilterDefinitionForValidV2Definition(self): - filter = TrapFilter() - results = filter._parseFilterDefinition("include V2 .1.3.6.1.4.*", 99) - self.assertEquals(results, None) - - def testParseV1FilterDefinitionForGenericTrap(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", ["0"]) 
- self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 1) - self.assertEquals(len(filter._v1Filters), 0) - self.assertEquals(len(filter._v2Filters), 0) - - genericTrapDefinition = filter._v1Traps["0"] - self.assert_(genericTrapDefinition != None) - self.assertEquals(genericTrapDefinition.lineNumber, 99) - self.assertEquals(genericTrapDefinition.action, "include") - self.assertEquals(genericTrapDefinition.genericTrap, "0") - - # Now add another to make sure we can parse more than one - results = filter._parseV1FilterDefinition(100, "exclude", ["5"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 2) - self.assertEquals(len(filter._v1Filters), 0) - self.assertEquals(len(filter._v2Filters), 0) - - genericTrapDefinition = filter._v1Traps["5"] - self.assert_(genericTrapDefinition != None) - self.assertEquals(genericTrapDefinition.lineNumber, 100) - self.assertEquals(genericTrapDefinition.action, "exclude") - self.assertEquals(genericTrapDefinition.genericTrap, "5") - - def testParseV1FilterDefinitionEnterpriseSpecificTrap(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", ["1.2.3.*"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 1) - self.assertEquals(len(filter._v2Filters), 0) - - oidLevels = 4 - mapByLevel = filter._v1Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 1) - - filterDef = mapByLevel["1.2.3.*"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 99) - self.assertEquals(filterDef.action, "include") - self.assertEquals(filterDef.oid, "1.2.3.*") - self.assertEquals(filterDef.specificTrap, None) - - # Add another 4-level OID - results = filter._parseV1FilterDefinition(100, "exclude", ["1.2.3.4", "25"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 1) - self.assertEquals(len(filter._v2Filters), 0) - - mapByLevel = filter._v1Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 2) - - filterDef = mapByLevel["1.2.3.4-25"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 100) - self.assertEquals(filterDef.action, "exclude") - self.assertEquals(filterDef.oid, "1.2.3.4") - self.assertEquals(filterDef.specificTrap, "25") - - # Add a different specific trap for the same OID - results = filter._parseV1FilterDefinition(101, "exclude", ["1.2.3.4", "99"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 1) - self.assertEquals(len(filter._v2Filters), 0) - - mapByLevel = filter._v1Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 3) - - filterDef = mapByLevel["1.2.3.4-99"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 101) - self.assertEquals(filterDef.action, "exclude") - self.assertEquals(filterDef.oid, "1.2.3.4") - self.assertEquals(filterDef.specificTrap, "99") - - # Add another single-level OID - results = filter._parseV1FilterDefinition(101, "exclude", ["*"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 2) - self.assertEquals(len(filter._v2Filters), 0) - - oidLevels = 1 - mapByLevel = filter._v1Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 1) - - filterDef = 
mapByLevel["*"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 101) - self.assertEquals(filterDef.action, "exclude") - self.assertEquals(filterDef.oid, "*") - self.assertEquals(filterDef.specificTrap, None) - - def testParseV1FilterDefinitionFailsForTooManyArgs(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", ["0", "1", "2"]) - self.assertEquals(results, "Too many fields found; at most 4 fields allowed for V1 filters") - - def testParseV1FilterDefinitionFailsForEmptyOID(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", []) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - results = filter._parseV1FilterDefinition(99, "include", [""]) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - results = filter._parseV1FilterDefinition(99, "include", ["."]) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - results = filter._parseV1FilterDefinition(99, "include", ["..."]) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - def testParseV1FilterDefinitionFailsForInvalidOID(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", ["invalidOID"]) - self.assertEquals(results, "'invalidOID' is not a valid OID: Invalid character found; only digits, '.' and '*' allowed") - - def testParseV1FilterDefinitionFailsForInvalidTrap(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", ["a"]) - self.assertEquals(results, "Invalid generic trap 'a'; must be one of 0-5") - - results = filter._parseV1FilterDefinition(99, "include", ["6"]) - self.assertEquals(results, "Invalid generic trap '6'; must be one of 0-5") - - def testParseV1FilterDefinitionFailsForConflictingTrap(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", ["1"]) - self.assertEquals(results, None) - - results = filter._parseV1FilterDefinition(100, "include", ["1"]) - self.assertEquals(results, "Generic trap '1' conflicts with previous definition at line 99") - - # Verify we find a conflict for generic traps where the action differs - results = filter._parseV1FilterDefinition(100, "exclude", ["1"]) - self.assertEquals(results, "Generic trap '1' conflicts with previous definition at line 99") - - def testParseV1FilterDefinitionFailsForConflictingOID(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", [".1.3.6.1.4.5", "2"]) - self.assertEquals(results, None) - - results = filter._parseV1FilterDefinition(100, "include", [".1.3.6.1.4.5", "2"]) - self.assertEquals(results, "OID '1.3.6.1.4.5' conflicts with previous definition at line 99") - - # Verify we find a conflict for OIDs where the action differs - results = filter._parseV1FilterDefinition(100, "exclude", [".1.3.6.1.4.5", "2"]) - self.assertEquals(results, "OID '1.3.6.1.4.5' conflicts with previous definition at line 99") - - results = filter._parseV1FilterDefinition(101, "include", [".1.3.6.1.4.*"]) - self.assertEquals(results, None) - - # Verify we find a conflict for glob-based OIDs - results = filter._parseV1FilterDefinition(102, "include", [".1.3.6.1.4.*"]) - self.assertEquals(results, "OID '1.3.6.1.4.*' conflicts with previous definition at line 101") - - # Verify we find a conflict for glob-based OIDs where the action differs - results = filter._parseV1FilterDefinition(102, "exclude", [".1.3.6.1.4.*"]) - self.assertEquals(results, 
"OID '1.3.6.1.4.*' conflicts with previous definition at line 101") - - def testParseV1FilterDefinitionFailsForEnterpriseSpecificGlob(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", [".1.3.6.1.4.5.*", "23"]) - self.assertEquals(results, "Specific trap not allowed with globbed OID") - - def testParseV1FilterDefinitionFailsForInvalidEnterpriseSpecificTrap(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", [".1.3.6.1.4.5", "abc"]) - self.assertEquals(results, "Specific trap 'abc' invalid; must be non-negative integer") - - results = filter._parseV1FilterDefinition(99, "include", [".1.3.6.1.4.5", "-1"]) - self.assertEquals(results, "Specific trap '-1' invalid; must be non-negative integer") - - def testParseV1FilterDefinitionFailsForMissingEnterpriseSpecificTrap(self): - filter = TrapFilter() - results = filter._parseV1FilterDefinition(99, "include", [".1.3.6.1.4.5"]) - self.assertEquals(results, "Missing specific trap number or '*'") - - def testParseV2FilterDefinition(self): - filter = TrapFilter() - results = filter._parseV2FilterDefinition(99, "include", ["1.2.3.*"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 0) - self.assertEquals(len(filter._v2Filters), 1) - - oidLevels = 4 - mapByLevel = filter._v2Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 1) - - filterDef = mapByLevel["1.2.3.*"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 99) - self.assertEquals(filterDef.action, "include") - self.assertEquals(filterDef.oid, "1.2.3.*") - - # Add another 4-level OID - results = filter._parseV2FilterDefinition(100, "exclude", ["1.2.3.4"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 0) - self.assertEquals(len(filter._v2Filters), 1) - - mapByLevel = filter._v2Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 2) - - filterDef = mapByLevel["1.2.3.4"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 100) - self.assertEquals(filterDef.action, "exclude") - self.assertEquals(filterDef.oid, "1.2.3.4") - - # Add another single-level OID - results = filter._parseV2FilterDefinition(101, "exclude", ["*"]) - self.assertEquals(results, None) - self.assertEquals(len(filter._v1Traps), 0) - self.assertEquals(len(filter._v1Filters), 0) - self.assertEquals(len(filter._v2Filters), 2) - - oidLevels = 1 - mapByLevel = filter._v2Filters[oidLevels] - self.assert_(mapByLevel != None) - self.assertEquals(len(mapByLevel), 1) - - filterDef = mapByLevel["*"] - self.assert_(filterDef != None) - self.assertEquals(filterDef.lineNumber, 101) - self.assertEquals(filterDef.action, "exclude") - self.assertEquals(filterDef.oid, "*") - - def testParseV2FilterDefinitionFailsForTooManyArgs(self): - filter = TrapFilter() - results = filter._parseV2FilterDefinition(99, "include", ["0", "1"]) - self.assertEquals(results, "Too many fields found; at most 3 fields allowed for V2 filters") - - def testParseV2FilterDefinitionFailsForEmptyOID(self): - filter = TrapFilter() - results = filter._parseV2FilterDefinition(99, "include", []) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - results = filter._parseV2FilterDefinition(99, "include", [""]) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - results = 
filter._parseV2FilterDefinition(99, "include", ["."]) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - results = filter._parseV2FilterDefinition(99, "include", ["..."]) - self.assertEquals(results, "'' is not a valid OID: Empty OID is invalid") - - def testParseV2FilterDefinitionFailsForInvalidOID(self): - filter = TrapFilter() - results = filter._parseV2FilterDefinition(99, "include", ["invalidOID"]) - self.assertEquals(results, "'invalidOID' is not a valid OID: Invalid character found; only digits, '.' and '*' allowed") - - def testParseV2FilterDefinitionFailsForConflictingOID(self): - filter = TrapFilter() - results = filter._parseV2FilterDefinition(99, "include", [".1.3.6.1.4.5"]) - self.assertEquals(results, None) - - results = filter._parseV2FilterDefinition(100, "include", [".1.3.6.1.4.5"]) - self.assertEquals(results, "OID '1.3.6.1.4.5' conflicts with previous definition at line 99") - - # Verify we find a conflict for OIDs where the action differs - results = filter._parseV2FilterDefinition(100, "exclude", [".1.3.6.1.4.5"]) - self.assertEquals(results, "OID '1.3.6.1.4.5' conflicts with previous definition at line 99") - - results = filter._parseV2FilterDefinition(101, "include", [".1.3.6.1.4.*"]) - self.assertEquals(results, None) - - # Verify we find a conflict for glob-based OIDs - results = filter._parseV2FilterDefinition(102, "include", [".1.3.6.1.4.*"]) - self.assertEquals(results, "OID '1.3.6.1.4.*' conflicts with previous definition at line 101") - - # Verify we find a conflict for glob-based OIDs where the action differs - results = filter._parseV2FilterDefinition(102, "exclude", [".1.3.6.1.4.*"]) - self.assertEquals(results, "OID '1.3.6.1.4.*' conflicts with previous definition at line 101") - - def testDropV1EventForGenericTrapInclusion(self): - genericTrap = 0 - filterDef = GenericTrapFilterDefinition(99, "include", genericTrap) - filter = TrapFilter() - filter._v1Traps[genericTrap] = filterDef - - event = {"snmpVersion": "1", "snmpV1GenericTrapType": genericTrap} - self.assertFalse(filter._dropV1Event(event)) - - def testDropV1EventForGenericTrapForExclusion(self): - genericTrap = 1 - filterDef = GenericTrapFilterDefinition(99, "exclude", genericTrap) - filter = TrapFilter() - filter._v1Traps[genericTrap] = filterDef - - event = {"snmpVersion": "1", "snmpV1GenericTrapType": genericTrap} - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForGenericTrapForNoMatch(self): - genericTrap = 1 - filterDef = GenericTrapFilterDefinition(99, "exclude", genericTrap) - filter = TrapFilter() - filter._v1Traps[genericTrap] = filterDef - - event = {"snmpVersion": "1", "snmpV1GenericTrapType": 2} - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForEnterpriseSimpleGlobMatch(self): - filterDef = V1FilterDefinition(99, "exclude", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[4] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1.2.3.4" - } - self.assertTrue(filter._dropV1Event(event)) - - filterDef.action = "include" - self.assertFalse(filter._dropV1Event(event)) - - # This test uses 1 filters for each of two OID levels where the filter specifies a glob match - def testDropV1EventForSimpleGlobMatches(self): - filterDef = V1FilterDefinition(99, "include", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[4] = filtersByLevel - - filterDef = V1FilterDefinition(99, 
"include", "1.2.3.4.5.*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v1Filters[6] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1.2.3.4" - } - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.99" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.99.5" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4.99" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4.5" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4.5.99" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.99.4" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.99.4.5.6" - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventIncludeAll(self): - filterDef = V1FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1" - } - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1." - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3" - self.assertFalse(filter._dropV1Event(event)) - - def testDropV1EventExcludeAll(self): - filterDef = V1FilterDefinition(99, "exclude", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1" - } - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3" - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventExcludeAllBut(self): - filterDef = V1FilterDefinition(99, "exclude", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - filterDef = V1FilterDefinition(99, "include", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v1Filters[4] = filtersByLevel - - filterDef = V1FilterDefinition(99, "include", "1.4.5") - filterDef.specificTrap = "*" - filtersByLevel = {"1.4.5-*": filterDef} - filter._v1Filters[3] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1" - } - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.4.5.1" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.4.5" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.4.5" - event["snmpV1SpecificTrap"] = 23 - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4.5" - self.assertFalse(filter._dropV1Event(event)) - - def testDropV1EventIncludeAllBut(self): - filterDef = V1FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - 
filterDef = V1FilterDefinition(99, "exclude", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v1Filters[4] = filtersByLevel - - filterDef = V1FilterDefinition(99, "exclude", "1.4.5") - filterDef.specificTrap = "*" - filtersByLevel = {"1.4.5-*": filterDef} - filter._v1Filters[3] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1" - } - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.4.5.1" - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.4.5" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4.5" - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForInvalidGenericTrap(self): - filterDef = V1FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 9, - "snmpV1Enterprise": "1.2" - } - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForMissingGenericTrap(self): - filterDef = V1FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1Enterprise": "1.2" - } - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForMissingEnterpriseOID(self): - filterDef = V1FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - } - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForEnterpriseAllExcept(self): - filterDef = V1FilterDefinition(99, "include", "1.2.3") - filterDef.specificTrap = "*" - filtersByLevel = {"1.2.3-*": filterDef} - filter = TrapFilter() - filter._v1Filters[3] = filtersByLevel - - filterDef = V1FilterDefinition(99, "exclude", "1.2.3") - filterDef.specificTrap = "59" - filtersByLevel["1.2.3-59"] = filterDef - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1.2.3", - "snmpV1SpecificTrap": 59 - } - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1SpecificTrap"] = 99 - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2.3.4" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2" - self.assertTrue(filter._dropV1Event(event)) - - def testDropV1EventForEnterpriseSpecific(self): - filterDef = V1FilterDefinition(99, "include", "1.2.3") - filterDef.specificTrap = "59" - filtersByLevel = {"1.2.3-59": filterDef} - filter = TrapFilter() - filter._v1Filters[3] = filtersByLevel - - filterDef = V1FilterDefinition(99, "include", "1.2.3") - filterDef.specificTrap = "60" - filtersByLevel["1.2.3-60"] = filterDef - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1.2.3", - "snmpV1SpecificTrap": 59 - } - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1SpecificTrap"] = 60 - self.assertFalse(filter._dropV1Event(event)) - - event["snmpV1SpecificTrap"] = 1 - self.assertTrue(filter._dropV1Event(event)) - - 
event["snmpV1Enterprise"] = "1.2.3.4" - self.assertTrue(filter._dropV1Event(event)) - - event["snmpV1Enterprise"] = "1.2" - self.assertTrue(filter._dropV1Event(event)) - - def testDropV2EventForSimpleExactMatch(self): - filterDef = V2FilterDefinition(99, "exclude", "1.2.3.4") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[4] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1.2.3.4"} - self.assertTrue(filter._dropV2Event(event)) - - filterDef.action = "include" - self.assertFalse(filter._dropV2Event(event)) - - def testDropV2EventForSimpleGlobMatch(self): - filterDef = V2FilterDefinition(99, "exclude", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[4] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1.2.3.4"} - self.assertTrue(filter._dropV2Event(event)) - - filterDef.action = "include" - self.assertFalse(filter._dropV2Event(event)) - - # This test uses 1 filters for each of two OID levels where the filter specifies an exact match - def testDropV2EventForSimpleExactMatches(self): - filterDef = V2FilterDefinition(99, "include", "1.2.3") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[3] = filtersByLevel - - filterDef = V2FilterDefinition(99, "include", "1.2.3.4") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[4] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1.2.3"} - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4" - self.assertFalse(filter._dropV2Event(event)) - - # OIDs with fewer or more levels than the existing filters should NOT match - event["oid"] = "1.2" - self.assertTrue(filter._dropV2Event(event)) - event["oid"] = "1.2.3.4.9" - self.assertTrue(filter._dropV2Event(event)) - - # OIDs that differ only in the last level should NOT match - event["oid"] = "1.2.9" - self.assertTrue(filter._dropV2Event(event)) - event["oid"] = "1.2.3.9" - self.assertTrue(filter._dropV2Event(event)) - - # This test uses 1 filters for each of two OID levels where the filter specifies a glob match - def testDropV2EventForSimpleGlobMatches(self): - filterDef = V2FilterDefinition(99, "include", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[4] = filtersByLevel - - filterDef = V2FilterDefinition(99, "include", "1.2.3.4.5.*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[6] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1.2.3.4"} - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.99" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.99.5" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4.99" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4.5" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4.5.99" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.3" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.99.4" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.99.4.5.6" - self.assertTrue(filter._dropV2Event(event)) - - def testDropV2EventIncludeAll(self): - filterDef = V2FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[1] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1"} - 
self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1." - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3" - self.assertFalse(filter._dropV2Event(event)) - - def testDropV2EventExcludeAll(self): - filterDef = V2FilterDefinition(99, "exclude", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[1] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1"} - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.3" - self.assertTrue(filter._dropV2Event(event)) - - def testDropV2EventExcludeAllBut(self): - filterDef = V2FilterDefinition(99, "exclude", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[1] = filtersByLevel - - filterDef = V2FilterDefinition(99, "include", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[4] = filtersByLevel - - filterDef = V2FilterDefinition(99, "include", "1.4.5") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[3] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1"} - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.3" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.4.5.1" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.4.5" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4.5" - self.assertFalse(filter._dropV2Event(event)) - - def testDropV2EventIncludeAllBut(self): - filterDef = V2FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v2Filters[1] = filtersByLevel - - filterDef = V2FilterDefinition(99, "exclude", "1.2.3.*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[4] = filtersByLevel - - filterDef = V2FilterDefinition(99, "exclude", "1.4.5") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[3] = filtersByLevel - - event = {"snmpVersion": "2", "oid": "1"} - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.2.3" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.4.5.1" - self.assertFalse(filter._dropV2Event(event)) - - event["oid"] = "1.4.5" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4" - self.assertTrue(filter._dropV2Event(event)) - - event["oid"] = "1.2.3.4.5" - self.assertTrue(filter._dropV2Event(event)) - - def testDropEvent(self): - filterDef = V1FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter = TrapFilter() - filter._v1Filters[1] = filtersByLevel - - filterDef = V2FilterDefinition(99, "include", "*") - filtersByLevel = {filterDef.oid: filterDef} - filter._v2Filters[1] = filtersByLevel - - event = { - "snmpVersion": "1", - "snmpV1GenericTrapType": 6, - "snmpV1Enterprise": "1.2.3", - "snmpV1SpecificTrap": 59 - } - self.assertFalse(filter._dropEvent(event)) - - event = { - "snmpVersion": "2", - "oid": "1.2.3", - } - self.assertFalse(filter._dropEvent(event)) - - event["snmpVersion"] = "invalidVersion" - self.assertTrue(filter._dropEvent(event)) - - def testTransformPassesV1Event(self): - filterDef = V1FilterDefinition(99, "include", "1.2.3") - filterDef.specificTrap = "59" - filtersByLevel = {"1.2.3-59": filterDef} - filter = TrapFilter() - filter._v1Filters[3] = 
filtersByLevel
-        filter._filtersDefined = True
-
-        event = {
-            "snmpVersion": "1",
-            "snmpV1GenericTrapType": 6,
-            "snmpV1Enterprise": filterDef.oid,
-            "snmpV1SpecificTrap": filterDef.specificTrap
-        }
-        self.assertEquals(TRANSFORM_CONTINUE, filter.transform(event))
-
-    def testTransformDropsV1Event(self):
-        filterDef = V1FilterDefinition(99, "exclude", "1.2.3")
-        filterDef.specificTrap = "59"
-        filtersByLevel = {"1.2.3-59": filterDef}
-        filter = TrapFilter()
-        filter._v1Filters[3] = filtersByLevel
-        filter._filtersDefined = True
-
-        event = {
-            "snmpVersion": "1",
-            "snmpV1GenericTrapType": 6,
-            "snmpV1Enterprise": filterDef.oid,
-            "snmpV1SpecificTrap": filterDef.specificTrap
-        }
-        self.assertEquals(TRANSFORM_DROP, filter.transform(event))
-
-    def testTransformPassesV2Event(self):
-        filterDef = V2FilterDefinition(99, "include", "1.2.3")
-        filtersByLevel = {filterDef.oid: filterDef}
-        filter = TrapFilter()
-        filter._v2Filters[3] = filtersByLevel
-        filter._filtersDefined = True
-
-        event = {
-            "snmpVersion": "2",
-            "oid": filterDef.oid,
-        }
-        self.assertEquals(TRANSFORM_CONTINUE, filter.transform(event))
-
-    def testTransformDropsV2Event(self):
-        filterDef = V2FilterDefinition(99, "exclude", "1.2.3")
-        filtersByLevel = {filterDef.oid: filterDef}
-        filter = TrapFilter()
-        filter._v2Filters[3] = filtersByLevel
-        filter._filtersDefined = True
-
-        event = {
-            "snmpVersion": "2",
-            "oid": filterDef.oid,
-        }
-        self.assertEquals(TRANSFORM_DROP, filter.transform(event))
-
-    def testTransformWithoutFilters(self):
-        filter = TrapFilter()
-        filter._filtersDefined = False
-
-        event = {
-            "snmpVersion": "1",
-            "snmpV1GenericTrapType": 6,
-            "snmpV1Enterprise": "1.2.3",
-            "snmpV1SpecificTrap": 59
-        }
-        self.assertEquals(TRANSFORM_CONTINUE, filter.transform(event))
-
-        event = {
-            "snmpVersion": "2",
-            "oid": "1.2.3",
-        }
-        self.assertEquals(TRANSFORM_CONTINUE, filter.transform(event))
-
-def test_suite():
-    from unittest import TestSuite, makeSuite
-    suite = TestSuite()
-    suite.addTest(makeSuite(OIDBasedFilterDefinitionTest))
-    suite.addTest(makeSuite(GenericTrapFilterDefinitionTest))
-    suite.addTest(makeSuite(TrapFilterTest))
-    return suite
diff --git a/Products/ZenEvents/tests/test_zeneventd.py b/Products/ZenEvents/tests/test_zeneventd.py
index c36108a699..10c1f4e607 100644
--- a/Products/ZenEvents/tests/test_zeneventd.py
+++ b/Products/ZenEvents/tests/test_zeneventd.py
@@ -2,34 +2,37 @@
 from mock import patch, call, Mock, MagicMock
 from Products.ZenEvents.zeneventd import (
+    CheckInputPipe,
+    Event,
+    EventContext,
+    EventPipelineProcessor,
+    time,
     Timeout,
     TimeoutError,
-    EventPipelineProcessor,
-    Event,
     ZepRawEvent,
-    CheckInputPipe,
-    EventContext,
-    time
 )
 from Products.ZenEvents.events2.processing import EventProcessorPipe
 from zenoss.protocols.protobufs.zep_pb2 import EventActor, EventSeverity
 from zenoss.protocols.protobufs.model_pb2 import ModelElementType
 
-PATH = {'zeneventd': 'Products.ZenEvents.zeneventd'}
+PATH = {"zeneventd": "Products.ZenEvents.zeneventd"}
 
 
 class EventPipelineProcessorTest(TestCase):
-
     def setUp(self):
         self.dmd = Mock()
+        self.log_patcher = patch(
+            "{zeneventd}.log".format(**PATH), autospec=True
+        )
         self.manager_patcher = patch(
-            '{zeneventd}.Manager'.format(**PATH), autospec=True
+            "{zeneventd}.Manager".format(**PATH), autospec=True
         )
         # silence 'new thread' error
         self.metric_reporter_patcher = patch(
-            '{zeneventd}.MetricReporter'.format(**PATH), autospec=True
+            "{zeneventd}.MetricReporter".format(**PATH), autospec=True
         )
+        self.log_patcher.start()
         self.manager_patcher.start()
         self.metric_reporter_patcher.start()
@@ -46,7 +49,7 @@ def setUp(self):
                 element_sub_type_id=ModelElementType.COMPONENT,
                 element_sub_identifier="zeneventd",
             ),
-            summary='Event Summary',
+            summary="Event Summary",
             severity=EventSeverity.SEVERITY_DEBUG,
             event_key="RMMonitor.collect.docker",
             agent="zenpython",
@@ -55,11 +58,12 @@ def setUp(self):
         )
 
     def tearDown(self):
+        self.log_patcher.stop()
         self.manager_patcher.stop()
         self.metric_reporter_patcher.stop()
 
     def test_processMessage(self):
-        self.epp._pipes = (CheckInputPipe(self.epp._manager), )
+        self.epp._pipes = (CheckInputPipe(self.epp._manager),)
 
         zep_raw_event = self.epp.processMessage(self.message)
@@ -69,7 +73,7 @@ def setUp(self):
 
     def test_exception_in_pipe(self):
         error_pipe = self.ErrorPipe(self.epp._manager)
-        self.epp._pipes = (error_pipe, )
+        self.epp._pipes = (error_pipe,)
         self.epp._pipe_timers[error_pipe.name] = MagicMock()
 
         zep_raw_event = self.epp.processMessage(self.message)
@@ -82,12 +86,11 @@ def test_exception_in_pipe(self):
         )
 
         self.assertEqual(
-            zep_raw_event.event.message,
-            exception_event.event.message
+            zep_raw_event.event.message, exception_event.event.message
         )
 
     class ErrorPipe(EventProcessorPipe):
-        ERR = Exception('pipeline failure')
+        ERR = Exception("pipeline failure")
 
         def __call__(self, eventContext):
             raise self.ERR
@@ -108,7 +111,7 @@ def test_synchronize_with_database_every_event(self):
         self.dmd._p_jar.sync.assert_called_once_with()
 
     def test_create_exception_event(self):
-        error = Exception('test exception')
+        error = Exception("test exception")
         event_context = self.epp.create_exception_event(self.message, error)
 
         self.assertIsInstance(event_context, EventContext)
@@ -117,19 +120,17 @@ def test_create_exception_event(self):
         self.assertIsInstance(exception_event, Event)
         self.assertEqual(
             exception_event.summary,
-            "Internal exception processing event: Exception('test exception',)"
-        )
-        self.assertTrue(
-            str(error) in exception_event.message
+            "Internal exception processing event: "
+            "Exception('test exception',)",
         )
+        self.assertTrue(str(error) in exception_event.message)
 
 
 class TimeoutTest(TestCase):
-
     def setUp(self):
         # Patch external dependencies
         self.signal_patcher = patch(
-            '{zeneventd}.signal'.format(**PATH), autospec=True
+            "{zeneventd}.signal".format(**PATH), autospec=True
         )
         self.signal = self.signal_patcher.start()
@@ -139,19 +140,17 @@ def tearDown(self):
     def test_context_manager(self):
         timeout_duration = 10
-        with Timeout('event', timeout_duration) as ctx:
+        with Timeout("event", timeout_duration) as ctx:
             self.signal.signal.assert_called_with(
                 self.signal.SIGALRM, ctx.handle_timeout
             )
-            self.signal.alarm.assert_has_calls(
-                [call(timeout_duration), call(0)]
-            )
+            self.signal.alarm.assert_has_calls([call(timeout_duration), call(0)])
 
     def test_handle_timeout_raises_exception(self):
         with self.assertRaises(TimeoutError):
             with Timeout(1) as ctx:
-                ctx.handle_timeout(1, 'frame')
+                ctx.handle_timeout(1, "frame")
 
 
 class BaseQueueConsumerTaskTest(TestCase):
diff --git a/Products/ZenEvents/tests/test_zentrap.py b/Products/ZenEvents/tests/test_zentrap.py
deleted file mode 100644
index c9e0a2c6a6..0000000000
--- a/Products/ZenEvents/tests/test_zentrap.py
+++ /dev/null
@@ -1,1078 +0,0 @@
-import base64
-import logging
-
-from struct import pack
-from unittest import TestCase
-
-from Products.ZenEvents.zentrap import (
-    decode_snmp_value, TrapTask, FakePacket, SNMPv1, SNMPv2,
-    LEGACY_VARBIND_COPY_MODE, DIRECT_VARBIND_COPY_MODE, MIXED_VARBIND_COPY_MODE
-)
-
-log =
logging.getLogger("test_zentrap") - - -class DecodersUnitTest(TestCase): - - def setUp(self): - logging.disable(logging.CRITICAL) - - def tearDown(self): - logging.disable(logging.NOTSET) - - def test_decode_oid(self): - value = (1, 2, 3, 4) - self.assertEqual( - decode_snmp_value(value), - "1.2.3.4" - ) - - def test_decode_utf8(self): - value = 'valid utf8 string \xc3\xa9'.encode('utf8') - self.assertEqual( - decode_snmp_value(value), - u'valid utf8 string \xe9'.decode('utf8') - ) - - def test_decode_datetime(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), - '2017-12-20T11:50:50.800+06:05' - ) - - def test_decode_bad_timezone(self): - value = pack(">HBBBBBBBBB", 2017, 12, 20, 11, 50, 50, 8, 0, 0, 0) - dttm = decode_snmp_value(value) - self.assertEqual(dttm[:23], "2017-12-20T11:50:50.800") - self.assertRegexpMatches( - dttm[23:], "^[+-][01][0-9]:[0-5][0-9]$" - ) - - def test_decode_invalid_timezone(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 50, 8, '=', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_incomplete_datetime(self): - value = pack(">HBBBBBB", 2017, 12, 20, 11, 50, 50, 8) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_month_high(self): - value = pack(">HBBBBBBsBB", 2017, 13, 20, 11, 50, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_month_low(self): - value = pack(">HBBBBBBsBB", 2017, 0, 20, 11, 50, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_day_high(self): - value = pack(">HBBBBBBsBB", 2017, 12, 32, 11, 50, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_day_low(self): - value = pack(">HBBBBBBsBB", 2017, 12, 0, 11, 50, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_hour(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 24, 50, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_minute(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 60, 50, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_bad_second(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 61, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_leap_second(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 60, 8, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), '2017-12-20T11:50:60.800+06:05' - ) - - def test_decode_bad_decisecond(self): - value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 50, 10, '+', 6, 5) - self.assertEqual( - decode_snmp_value(value), "BASE64:" + base64.b64encode(value) - ) - - def test_decode_value_ipv4(self): - value = '\xcc\x0b\xc8\x01' - self.assertEqual( - decode_snmp_value(value), - '204.11.200.1' - ) - - def test_decode_value_ipv6(self): - value = 'Z\xef\x00+\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08' - self.assertEqual( - decode_snmp_value(value), - '5aef:2b::8' - ) - - def test_decode_long_values(self): - value = long(555) - self.assertEqual( - decode_snmp_value(value), - int(555) - ) - - 
def test_decode_int_values(self): - value = int(555) - self.assertEqual( - decode_snmp_value(value), - int(555) - ) - - def test_encode_invalid_chars(self): - value = '\xde\xad\xbe\xef\xfe\xed\xfa\xce' - self.assertEqual( - decode_snmp_value(value), - 'BASE64:3q2+7/7t+s4=' - ) - - def test_decode_unexpected_object_type(self): - value = object() - self.assertEqual( - decode_snmp_value(value), - None - ) - - -class MockTrapTask(TrapTask): - - def __init__(self, oidMap, varbindCopyMode=DIRECT_VARBIND_COPY_MODE): - self.oidMap = oidMap - self.log = log - if varbindCopyMode is not None: - self.varbindCopyMode = varbindCopyMode - else: - self.varbindCopyMode = MIXED_VARBIND_COPY_MODE - processor_class = TrapTask._varbind_processors.get(self.varbindCopyMode) - self._process_varbinds = processor_class(self.oid2name) - - -class TestOid2Name(TestCase): - - def test_NoExactMatch(self): - oidMap = {} - task = MockTrapTask(oidMap) - self.assertEqual(task.oid2name(".1.2.3.4"), "1.2.3.4") - self.assertEqual(task.oid2name(".1.2.3.4", strip=True), "1.2.3.4") - - def test_HasExactMatch(self): - oidMap = {"1.2.3.4": "Zenoss.Test.exactMatch"} - task = MockTrapTask(oidMap) - result = task.oid2name(".1.2.3.4") - self.assertEqual(result, "Zenoss.Test.exactMatch") - result = task.oid2name(".1.2.3.4", strip=True) - self.assertEqual(result, "Zenoss.Test.exactMatch") - - def test_NoInexactMatch(self): - oidMap = {"1.2.3.4": "Zenoss.Test.exactMatch"} - task = MockTrapTask(oidMap) - result = task.oid2name(".1.5.3.4", exactMatch=False) - self.assertEqual(result, "1.5.3.4") - - def test_HasInexactMatchNotStripped(self): - oidMap = { - "1.2": "Zenoss", - "1.2.3": "Zenoss.Test", - "1.2.3.2": "Zenoss.Test.inexactMatch" - } - task = MockTrapTask(oidMap) - result = task.oid2name(".1.2.3.2.5", exactMatch=False) - self.assertEqual(result, "Zenoss.Test.inexactMatch.5") - result = task.oid2name(".1.2.3.2.5.6", exactMatch=False) - self.assertEqual(result, "Zenoss.Test.inexactMatch.5.6") - - def test_HasInexactMatchStripped(self): - oidMap = { - "1.2": "Zenoss", - "1.2.3": "Zenoss.Test", - "1.2.3.2": "Zenoss.Test.inexactMatch" - } - task = MockTrapTask(oidMap) - result = task.oid2name(".1.2.3.2.5", exactMatch=False, strip=True) - self.assertEqual(result, "Zenoss.Test.inexactMatch") - result = task.oid2name(".1.2.3.2.5.6", exactMatch=False, strip=True) - self.assertEqual(result, "Zenoss.Test.inexactMatch") - - def test_AcceptsTuple(self): - oidMap = {} - task = MockTrapTask(oidMap) - self.assertEqual(task.oid2name((1, 2, 3, 4)), "1.2.3.4") - - -class _SnmpV1Base(object): - - def makeInputs(self, trapType=6, oidMap={}, variables=(), varbindCopyMode=None): - pckt = FakePacket() - pckt.version = SNMPv1 - pckt.host = "localhost" - pckt.port = 162 - pckt.variables = variables - pckt.community = "" - pckt.enterprise_length = 0 - - # extra fields for SNMPv1 packets - pckt.agent_addr = [192, 168, 24, 4] - pckt.trap_type = trapType - pckt.specific_type = 5 - pckt.enterprise = "1.2.3.4" - pckt.enterprise_length = len(pckt.enterprise) - pckt.community = "community" - - return pckt, MockTrapTask(oidMap, varbindCopyMode) - - -class TestDecodeSnmpV1(TestCase, _SnmpV1Base): - - def test_NoAgentAddr(self): - pckt, task = self.makeInputs() - del pckt.agent_addr - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(result["device"], "localhost") - - def test_FieldsNoMappingUsed(self): - pckt, task = self.makeInputs() - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - - 
self.assertEqual(result["device"], "192.168.24.4") - self.assertEqual(result["snmpVersion"], "1") - self.assertEqual(result["snmpV1Enterprise"], "1.2.3.4") - self.assertEqual(result["snmpV1GenericTrapType"], 6) - self.assertEqual(result["snmpV1SpecificTrap"], 5) - self.assertEqual(eventType, "1.2.3.4.5") - self.assertEqual(result["oid"], "1.2.3.4.5") - - def test_EnterpriseOIDWithExtraZero(self): - pckt, task = self.makeInputs(oidMap={"1.2.3.4.0.5": "testing"}) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "testing") - self.assertEqual(result["oid"], "1.2.3.4.0.5") - - def test_TrapType0(self): - pckt, task = self.makeInputs(trapType=0) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "coldStart") - self.assertEqual(result["snmpV1GenericTrapType"], 0) - - def test_TrapType1(self): - pckt, task = self.makeInputs(trapType=1) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "warmStart") - self.assertEqual(result["snmpV1GenericTrapType"], 1) - - def test_TrapType2(self): - pckt, task = self.makeInputs(trapType=2) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "snmp_linkDown") - self.assertEqual(result["snmpV1GenericTrapType"], 2) - - def test_TrapType3(self): - pckt, task = self.makeInputs(trapType=3) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "snmp_linkUp") - self.assertEqual(result["snmpV1GenericTrapType"], 3) - - def test_TrapType4(self): - pckt, task = self.makeInputs(trapType=4) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "authenticationFailure") - self.assertEqual(result["snmpV1GenericTrapType"], 4) - - def test_TrapType5(self): - pckt, task = self.makeInputs(trapType=5) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "egpNeighorLoss") - self.assertEqual(result["snmpV1GenericTrapType"], 5) - - def test_TrapType6(self): - pckt, task = self.makeInputs(trapType=6) - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - self.assertEqual(eventType, "1.2.3.4.5") - self.assertEqual(result["snmpV1GenericTrapType"], 6) - - -class _SnmpV2Base(object): - - baseOidMap = { - # Std var binds in SnmpV2 traps/notifications - "1.3.6.1.2.1.1.3": "sysUpTime", - "1.3.6.1.6.3.1.1.4.1": "snmpTrapOID", - - # SnmpV2 Traps (snmpTrapOID.0 values) - "1.3.6.1.6.3.1.1.5.1": "coldStart", - "1.3.6.1.6.3.1.1.5.2": "warmStart", - "1.3.6.1.6.3.1.1.5.3": "linkDown", - "1.3.6.1.6.3.1.1.5.4": "linkUp", - "1.3.6.1.6.3.1.1.5.5": "authenticationFailure", - "1.3.6.1.6.3.1.1.5.6": "egpNeighborLoss", - } - - def makePacket(self, trapOID, variables=()): - pckt = FakePacket() - pckt.version = SNMPv2 - pckt.host = "localhost" - pckt.port = 162 - - if isinstance(trapOID, (str, unicode)): - trapOID = tuple(map(int, trapOID.split('.'))) - pckt.variables = [ - ((1, 3, 6, 1, 2, 1, 1, 3, 0), 5342), - ((1, 3, 6, 1, 6, 3, 1, 1, 4, 1, 0), trapOID) - ] - pckt.variables.extend(variables) - pckt.community = "public" - pckt.enterprise_length = 0 - return pckt - - def makeTask(self, extraOidMap={}, varbindCopyMode=None): - oidMap = self.baseOidMap.copy() - oidMap.update(extraOidMap) - return MockTrapTask(oidMap, varbindCopyMode) - - def makeInputs( - self, trapOID="1.3.6.1.6.3.1.1.5.1", variables=(), oidMap={}, varbindCopyMode=None - ): - pckt = self.makePacket(trapOID=trapOID, 
variables=variables) - task = self.makeTask(extraOidMap=oidMap, varbindCopyMode=varbindCopyMode) - return pckt, task - - -class TestDecodeSnmpV2OrV3(TestCase, _SnmpV2Base): - - def test_UnknownTrapType(self): - pckt, task = self.makeInputs(trapOID="1.2.3") - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - self.assertIn("snmpVersion", result) - self.assertEqual(result["snmpVersion"], "2") - self.assertEqual(eventType, "1.2.3") - self.assertIn("snmpVersion", result) - self.assertIn("oid", result) - self.assertIn("device", result) - self.assertEqual(result["snmpVersion"], "2") - self.assertEqual(result["oid"], "1.2.3") - self.assertEqual(result["device"], "localhost") - - def test_KnownTrapType(self): - pckt, task = self.makeInputs(trapOID="1.3.6.1.6.3.1.1.5.1") - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - self.assertIn("oid", result) - self.assertEqual(eventType, "coldStart") - self.assertEqual(result["oid"], "1.3.6.1.6.3.1.1.5.1") - - def test_TrapAddressOID(self): - pckt, task = self.makeInputs( - trapOID="1.3.6.1.6.3.1.1.5.1", - variables=( - ((1, 3, 6, 1, 6, 3, 18, 1, 3), "192.168.51.100"), - ), - oidMap={ - "1.3.6.1.6.3.18.1.3": "snmpTrapAddress" - } - ) - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - self.assertIn("snmpTrapAddress", result) - self.assertEqual(result["snmpTrapAddress"], "192.168.51.100") - self.assertEqual(result["device"], "192.168.51.100") - - def test_RenamedLinkDown(self): - pckt, task = self.makeInputs(trapOID="1.3.6.1.6.3.1.1.5.3") - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - self.assertIn("oid", result) - self.assertEqual(eventType, "snmp_linkDown") - self.assertEqual(result["oid"], "1.3.6.1.6.3.1.1.5.3") - - def test_RenamedLinkUp(self): - pckt, task = self.makeInputs(trapOID="1.3.6.1.6.3.1.1.5.4") - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - self.assertIn("oid", result) - self.assertEqual(eventType, "snmp_linkUp") - self.assertEqual(result["oid"], "1.3.6.1.6.3.1.1.5.4") - - def test_PartialNamedVarBindNoneValue(self): - pckt = self.makePacket("1.3.6.1.6.3.1.1.5.3") - pckt.variables.append( - ((1, 2, 6, 0), None), - ) - task = MockTrapTask({"1.2.6.0": "testVar"}) - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - totalVarKeys = sum(1 for k in result if k.startswith("testVar")) - self.assertEqual(totalVarKeys, 1) - self.assertIn("testVar", result) - self.assertEqual(result["testVar"], "None") - - -class _VarbindTests(object): - - def case_unknown_id_single(self): - variables = (((1, 2, 6, 7), "foo"),) - tests = ( - self.makeInputs( - variables=variables, - varbindCopyMode=MIXED_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE - ), - ) - for test in tests: - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - totalVarKeys = sum(1 for k in result if k.startswith("1.2.6")) - self.assertEqual(totalVarKeys, 1) - self.assertEqual(result["1.2.6.7"], "foo") - - def case_unknown_id_repeated(self): - variables = ( - ((1, 2, 6, 7), "foo"), - ((1, 2, 6, 7), "bar"), - ((1, 2, 6, 7), "baz"), - ) - tests = ( - self.makeInputs( - variables=variables, - varbindCopyMode=MIXED_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - 
varbindCopyMode=LEGACY_VARBIND_COPY_MODE - ), - ) - for test in tests: - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - totalVarKeys = sum(1 for k in result if k.startswith("1.2.6")) - self.assertEqual(totalVarKeys, 1) - self.assertEqual(result["1.2.6.7"], "foo,bar,baz") - - def case_unknown_ids_multiple(self): - variables = ( - ((1, 2, 6, 0), "foo"), - ((1, 2, 6, 1), "bar"), - ) - tests = ( - self.makeInputs( - variables=variables, - varbindCopyMode=MIXED_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE - ), - ) - expected_results = ({ - "1.2.6.0": "foo", - "1.2.6.1": "bar", - }, { - "1.2.6.0": "foo", - "1.2.6.1": "bar", - }, { - "1.2.6.0": "foo", - "1.2.6.1": "bar", - },) - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - totalVarKeys = sum(1 for k in result if k.startswith("1.2.6")) - self.assertEqual(totalVarKeys, 2) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_one_id(self): - variables = (((1, 2, 6, 7), "foo"),) - oidMap = {"1.2.6.7": "testVar"} - tests = ( - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE - ), - ) - expected_results = ({ - "testVar": "foo", - }, { - "testVar": "foo", - }, { - "testVar": "foo", - },) - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - totalVarKeys = sum(1 for k in result if k.startswith("testVar")) - self.assertEqual(totalVarKeys, 1) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_one_id_one_sub_id(self): - oidMap = {"1.2.6": "testVar"} - variables = (((1, 2, 6, 5), "foo"),) - tests = ( - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE - ), - ) - expected_results = ({ - "testVar": "foo", - "testVar.ifIndex": "5", - }, { - "testVar.5": "foo", - "testVar.sequence": "5", - }, { - "testVar": "foo", - "testVar.ifIndex": "5", - },) - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - count = sum(1 for k in result if k.startswith("testVar")) - self.assertEqual(count, len(expected.keys())) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_one_id_multiple_sub_ids(self): - oidMap = {"1.2.6": "testVar"} - variables_one = ( - ((1, 2, 6, 0), "foo"), - ((1, 2, 6, 1), "bar"), - ((1, 2, 6, 2), "baz"), - ) - variables_two = ( - ((1, 2, 6, 3), "foo"), - ((1, 2, 6, 3), "bar"), - ) - - tests = ( - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - 
oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - ) - expected_results = ({ - "testVar.0": "foo", - "testVar.1": "bar", - "testVar.2": "baz", - "testVar.sequence": "0,1,2", - }, { - "testVar.3": "foo,bar", - "testVar.sequence": "3,3", - }, { - "testVar.0": "foo", - "testVar.1": "bar", - "testVar.2": "baz", - "testVar.sequence": "0,1,2", - }, { - "testVar.3": "foo,bar", - "testVar.sequence": "3,3", - }, { - "testVar": "foo,bar,baz", - "testVar.ifIndex": "0,1,2", - }, { - "testVar": "foo,bar", - "testVar.ifIndex": "3,3", - }) - for test, expected in zip(tests, expected_results): - result = yield test - count = sum(1 for k in result if k.startswith("testVar")) - self.assertEqual(count, len(expected.keys())) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_multiple_ids(self): - oidMap = { - "1.2.6": "foo", - "1.2.7": "bar", - } - variables = ( - ((1, 2, 6), "is a foo"), - ((1, 2, 7), "lower the bar"), - ) - tests = ( - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - ) - expected_results = ({ - "foo": "is a foo", - "bar": "lower the bar", - }, { - "foo": "is a foo", - "bar": "lower the bar", - }, { - "foo": "is a foo", - "bar": "lower the bar", - },) - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - count = sum( - 1 for k in result - if k.startswith("bar") or k.startswith("foo") - ) - self.assertEqual(count, len(expected.keys())) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_multiple_ids_one_sub_id_each(self): - oidMap = { - "1.2.6": "foo", - "1.2.7": "bar", - } - variables_one = ( - ((1, 2, 6, 0), "is a foo"), - ((1, 2, 7, 2), "lower the bar"), - ) - variables_two = ( - ((1, 2, 6, 0, 1), "is a foo"), - ((1, 2, 7, 2, 1), "lower the bar"), - ) - tests = ( - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - ) - expected_results = ({ - "foo": "is a foo", - "foo.ifIndex": "0", - "bar": "lower the bar", - "bar.ifIndex": "2", - }, { - "foo": "is a foo", - "foo.ifIndex": "0.1", - "bar": "lower the bar", - "bar.ifIndex": "2.1", 
- }, { - "foo.0": "is a foo", - "foo.sequence": "0", - "bar.2": "lower the bar", - "bar.sequence": "2", - }, { - "foo.0.1": "is a foo", - "foo.sequence": "0.1", - "bar.2.1": "lower the bar", - "bar.sequence": "2.1", - }, { - "foo": "is a foo", - "foo.ifIndex": "0", - "bar": "lower the bar", - "bar.ifIndex": "2", - }, { - "foo": "is a foo", - "foo.ifIndex": "0.1", - "bar": "lower the bar", - "bar.ifIndex": "2.1", - },) - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - count = sum( - 1 for k in result - if k.startswith("bar") or k.startswith("foo") - ) - self.assertEqual(count, len(expected.keys())) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_multiple_ids_multiple_sub_ids(self): - oidMap = { - "1.2.6": "foo", - "1.2.7": "bar", - } - variables_one = ( - ((1, 2, 6, 0, 1), "here a foo"), - ((1, 2, 6, 1, 1), "there a foo"), - ((1, 2, 7, 2, 1), "lower the bar"), - ((1, 2, 7, 2, 2), "raise the bar"), - ) - variables_two = ( - ((1, 2, 6, 0), "here a foo"), - ((1, 2, 6, 0), "there a foo"), - ((1, 2, 7, 3), "lower the bar"), - ((1, 2, 7, 3), "raise the bar"), - ) - tests = ( - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_one, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables_two, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - ) - expected_results = ({ - "foo.0.1": "here a foo", - "foo.1.1": "there a foo", - "foo.sequence": "0.1,1.1", - "bar.2.1": "lower the bar", - "bar.2.2": "raise the bar", - "bar.sequence": "2.1,2.2", - }, { - "foo.0": "here a foo,there a foo", - "foo.sequence": "0,0", - "bar.3": "lower the bar,raise the bar", - "bar.sequence": "3,3", - }, { - "foo.0.1": "here a foo", - "foo.1.1": "there a foo", - "foo.sequence": "0.1,1.1", - "bar.2.1": "lower the bar", - "bar.2.2": "raise the bar", - "bar.sequence": "2.1,2.2", - }, { - "foo.0": "here a foo,there a foo", - "foo.sequence": "0,0", - "bar.3": "lower the bar,raise the bar", - "bar.sequence": "3,3", - }, { - "foo": "here a foo,there a foo", - "foo.ifIndex": "0.1,1.1", - "bar": "lower the bar,raise the bar", - "bar.ifIndex": "2.1,2.2", - }, { - "foo": "here a foo,there a foo", - "foo.ifIndex": "0,0", - "bar": "lower the bar,raise the bar", - "bar.ifIndex": "3,3", - },) - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - count = sum( - 1 for k in result - if k.startswith("bar") or k.startswith("foo") - ) - self.assertEqual(count, len(expected.keys())) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - def case_ifentry_trap(self): - oidMap = { - "1.3.6.1.2.1.2.2.1.1": "ifIndex", - "1.3.6.1.2.1.2.2.1.7": "ifAdminStatus", - "1.3.6.1.2.1.2.2.1.8": "ifOperStatus", - "1.3.6.1.2.1.2.2.1.2": "ifDescr", - "1.3.6.1.2.1.31.1.1.1.18": "ifAlias", - } - variables=( - ((1, 3, 6, 1, 2, 1, 2, 2, 1, 1, 143), 143), - ((1, 
3, 6, 1, 2, 1, 2, 2, 1, 7, 143), 2), - ((1, 3, 6, 1, 2, 1, 2, 2, 1, 8, 143), 2), - ((1, 3, 6, 1, 2, 1, 2, 2, 1, 2, 143), "F23"), - ((1, 3, 6, 1, 2, 1, 31, 1, 1, 1, 18, 143), ""), - ) - tests = ( - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=MIXED_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=DIRECT_VARBIND_COPY_MODE, - ), - self.makeInputs( - variables=variables, - oidMap=oidMap, - varbindCopyMode=LEGACY_VARBIND_COPY_MODE, - ), - ) - expected_results = ({ - "ifIndex": "143", - "ifIndex.ifIndex": "143", - "ifAdminStatus": "2", - "ifAdminStatus.ifIndex": "143", - "ifOperStatus": "2", - "ifOperStatus.ifIndex": "143", - "ifDescr": "F23", - "ifDescr.ifIndex": "143", - "ifAlias": "", - "ifAlias.ifIndex": "143", - }, { - "ifIndex.143": "143", - "ifIndex.sequence": "143", - "ifAdminStatus.143": "2", - "ifAdminStatus.sequence": "143", - "ifOperStatus.143": "2", - "ifOperStatus.sequence": "143", - "ifDescr.143": "F23", - "ifDescr.sequence": "143", - "ifAlias.143": "", - "ifAlias.sequence": "143", - }, { - "ifIndex": "143", - "ifIndex.ifIndex": "143", - "ifAdminStatus": "2", - "ifAdminStatus.ifIndex": "143", - "ifOperStatus": "2", - "ifOperStatus.ifIndex": "143", - "ifDescr": "F23", - "ifDescr.ifIndex": "143", - "ifAlias": "", - "ifAlias.ifIndex": "143", - },) - - for test, expected in zip(tests, expected_results): - result = yield test - # eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - count = sum( - 1 for k in result - if k.startswith("ifIndex") - or k.startswith("ifAdminStatus") - or k.startswith("ifOperStatus") - or k.startswith("ifDescr") - or k.startswith("ifAlias") - ) - self.assertEqual(count, len(expected.keys())) - for key, value in expected.items(): - self.assertIn(key, result) - self.assertEqual(value, result[key]) - - -class TestSnmpV1VarbindHandling(TestCase, _SnmpV1Base, _VarbindTests): - - def _execute(self, cases): - try: - pckt, task = next(cases) - while True: - eventType, result = task.decodeSnmpv1(("localhost", 162), pckt) - pckt, task = cases.send(result) - except StopIteration: - pass - - def test_unknown_id_single(self): - self._execute(self.case_unknown_id_single()) - - def test_unknown_id_repeated(self): - self._execute(self.case_unknown_id_repeated()) - - def test_unknown_ids_multiple(self): - self._execute(self.case_unknown_ids_multiple()) - - def test_one_id(self): - self._execute(self.case_one_id()) - - def test_one_id_one_sub_id(self): - self._execute(self.case_one_id_one_sub_id()) - - def test_one_id_multiple_sub_ids(self): - self._execute(self.case_one_id_multiple_sub_ids()) - - def test_multiple_ids(self): - self._execute(self.case_multiple_ids()) - - def test_multiple_ids_one_sub_id_each(self): - self._execute(self.case_multiple_ids_one_sub_id_each()) - - def test_multiple_ids_multiple_sub_ids(self): - self._execute(self.case_multiple_ids_multiple_sub_ids()) - - def test_ifentry_trap(self): - self._execute(self.case_ifentry_trap()) - - -class TestSnmpV2VarbindHandling(TestCase, _SnmpV2Base, _VarbindTests): - - def _execute(self, cases): - try: - pckt, task = next(cases) - while True: - eventType, result = task.decodeSnmpV2OrV3(("localhost", 162), pckt) - pckt, task = cases.send(result) - except StopIteration: - pass - - def test_unknown_id_single(self): - self._execute(self.case_unknown_id_single()) - - def test_unknown_id_repeated(self): - self._execute(self.case_unknown_id_repeated()) - - def test_unknown_ids_multiple(self): - 
self._execute(self.case_unknown_ids_multiple()) - - def test_one_id(self): - self._execute(self.case_one_id()) - - def test_one_id_one_sub_id(self): - self._execute(self.case_one_id_one_sub_id()) - - def test_one_id_multiple_sub_ids(self): - self._execute(self.case_one_id_multiple_sub_ids()) - - def test_multiple_ids(self): - self._execute(self.case_multiple_ids()) - - def test_multiple_ids_one_sub_id_each(self): - self._execute(self.case_multiple_ids_one_sub_id_each()) - - def test_multiple_ids_multiple_sub_ids(self): - self._execute(self.case_multiple_ids_multiple_sub_ids()) - - def test_ifentry_trap(self): - self._execute(self.case_ifentry_trap()) - - -def test_suite(): - from unittest import TestSuite, makeSuite - suite = TestSuite() - suite.addTest(makeSuite(DecodersUnitTest)) - suite.addTest(makeSuite(TestOid2Name)) - suite.addTest(makeSuite(TestDecodeSnmpV1)) - suite.addTest(makeSuite(TestDecodeSnmpV2OrV3)) - suite.addTest(makeSuite(TestSnmpV1VarbindHandling)) - suite.addTest(makeSuite(TestSnmpV2VarbindHandling)) - return suite diff --git a/Products/ZenEvents/zeneventd.py b/Products/ZenEvents/zeneventd.py index f012507d59..399032dba7 100644 --- a/Products/ZenEvents/zeneventd.py +++ b/Products/ZenEvents/zeneventd.py @@ -21,22 +21,40 @@ from zenoss.protocols.interfaces import IAMQPConnectionInfo, IQueueSchema from zenoss.protocols.jsonformat import from_dict, to_dict from zenoss.protocols.protobufs.zep_pb2 import ( - STATUS_DROPPED, Event, ZepRawEvent + STATUS_DROPPED, + Event, + ZepRawEvent, ) from Products.ZenCollector.utils.maintenance import ( - MaintenanceCycle, QueueHeartbeatSender, maintenanceBuildOptions + MaintenanceCycle, + QueueHeartbeatSender, + maintenanceBuildOptions, ) from Products.ZenEvents.daemonlifecycle import ( - BuildOptionsEvent, DaemonCreatedEvent, DaemonStartRunEvent, SigTermEvent, - SigUsr1Event + BuildOptionsEvent, + DaemonCreatedEvent, + DaemonStartRunEvent, + SigTermEvent, + SigUsr1Event, ) from Products.ZenEvents.events2.processing import ( - AddDeviceContextAndTagsPipe, AssignDefaultEventClassAndTagPipe, - CheckHeartBeatPipe, CheckInputPipe, ClearClassRefreshPipe, DropEvent, - EventContext, EventPluginPipe, FingerprintPipe, IdentifierPipe, Manager, - ProcessingException, SerializeContextPipe, TransformAndReidentPipe, - TransformPipe, UpdateDeviceContextAndTagsPipe + AddDeviceContextAndTagsPipe, + AssignDefaultEventClassAndTagPipe, + CheckHeartBeatPipe, + CheckInputPipe, + ClearClassRefreshPipe, + DropEvent, + EventContext, + EventPluginPipe, + FingerprintPipe, + IdentifierPipe, + Manager, + ProcessingException, + SerializeContextPipe, + TransformAndReidentPipe, + TransformPipe, + UpdateDeviceContextAndTagsPipe, ) from Products.ZenEvents.interfaces import IPostEventPlugin, IPreEventPlugin from Products.ZenMessaging.queuemessaging.interfaces import IQueueConsumerTask @@ -51,9 +69,11 @@ def monkey_patch_rotatingfilehandler(): try: from cloghandler import ConcurrentRotatingFileHandler + logging.handlers.RotatingFileHandler = ConcurrentRotatingFileHandler except ImportError: from warnings import warn + warn( "ConcurrentLogHandler package not installed. Using" " RotatingFileLogHandler. 
While everything will still work fine," @@ -65,8 +85,8 @@ def monkey_patch_rotatingfilehandler(): log = logging.getLogger("zen.eventd") -EXCHANGE_ZEP_ZEN_EVENTS = '$ZepZenEvents' -QUEUE_RAW_ZEN_EVENTS = '$RawZenEvents' +EXCHANGE_ZEP_ZEN_EVENTS = "$ZepZenEvents" +QUEUE_RAW_ZEN_EVENTS = "$RawZenEvents" class EventPipelineProcessor(object): @@ -79,7 +99,7 @@ def __init__(self, dmd): self._manager = Manager(self.dmd) self._pipes = ( EventPluginPipe( - self._manager, IPreEventPlugin, 'PreEventPluginPipe' + self._manager, IPreEventPlugin, "PreEventPluginPipe" ), CheckInputPipe(self._manager), IdentifierPipe(self._manager), @@ -91,23 +111,23 @@ def __init__(self, dmd): UpdateDeviceContextAndTagsPipe(self._manager), IdentifierPipe(self._manager), AddDeviceContextAndTagsPipe(self._manager), - ] + ], ), AssignDefaultEventClassAndTagPipe(self._manager), FingerprintPipe(self._manager), SerializeContextPipe(self._manager), EventPluginPipe( - self._manager, IPostEventPlugin, 'PostEventPluginPipe' + self._manager, IPostEventPlugin, "PostEventPluginPipe" ), ClearClassRefreshPipe(self._manager), - CheckHeartBeatPipe(self._manager) + CheckHeartBeatPipe(self._manager), ) self._pipe_timers = {} for pipe in self._pipes: timer_name = pipe.name self._pipe_timers[timer_name] = Metrology.timer(timer_name) - self.reporter = MetricReporter(prefix='zenoss.zeneventd.') + self.reporter = MetricReporter(prefix="zenoss.zeneventd.") self.reporter.start() if not self.SYNC_EVERY_EVENT: @@ -133,8 +153,9 @@ class when it is done with the message eventContext = EventContext(log, zepevent) with Timeout( - zepevent, self.PROCESS_EVENT_TIMEOUT, - error_message='while processing event' + zepevent, + self.PROCESS_EVENT_TIMEOUT, + error_message="while processing event", ): for pipe in self._pipes: with self._pipe_timers[pipe.name]: @@ -142,12 +163,13 @@ class when it is done with the message if log.isEnabledFor(logging.DEBUG): # assume to_dict() is expensive. log.debug( - 'After pipe %s, event context is %s', - pipe.name, to_dict(eventContext.zepRawEvent) + "After pipe %s, event context is %s", + pipe.name, + to_dict(eventContext.zepRawEvent), ) if eventContext.event.status == STATUS_DROPPED: raise DropEvent( - 'Dropped by %s' % pipe, eventContext.event + "Dropped by %s" % pipe, eventContext.event ) except AttributeError: @@ -167,7 +189,7 @@ class when it is done with the message except Exception as error: log.info( "Failed to process event, forward original raw event: %s", - to_dict(zepevent.event) + to_dict(zepevent.event), ) # Pipes and plugins may raise ProcessingException's for their own # reasons. only log unexpected exceptions of other type @@ -179,15 +201,17 @@ class when it is done with the message if log.isEnabledFor(logging.DEBUG): # assume to_dict() is expensive. 
- log.debug("Publishing event: %s", to_dict(eventContext.zepRawEvent)) + log.debug( + "Publishing event: %s", to_dict(eventContext.zepRawEvent) + ) return eventContext.zepRawEvent def _synchronize_with_database(self): - '''sync() db if it has been longer than + """sync() db if it has been longer than self.syncInterval seconds since the last time, and no _synchronize has not been called for self.syncInterval seconds KNOWN ISSUE: ZEN-29884 - ''' + """ if self.SYNC_EVERY_EVENT: doSync = True else: @@ -204,23 +228,23 @@ def create_exception_event(self, message, exception): orig_zep_event = ZepRawEvent() orig_zep_event.event.CopyFrom(message) failure_event = { - 'uuid': guid.generate(), - 'created_time': int(time() * 1000), - 'fingerprint': - '|'.join(['zeneventd', 'processMessage', repr(exception)]), + "uuid": guid.generate(), + "created_time": int(time() * 1000), + "fingerprint": "|".join( + ["zeneventd", "processMessage", repr(exception)] + ), # Don't send the *same* event class or we loop endlessly - 'eventClass': '/', - 'summary': 'Internal exception processing event: %r' % exception, - 'message': - 'Internal exception processing event: %r/%s' % - (exception, to_dict(orig_zep_event.event)), - 'severity': 4, + "eventClass": "/", + "summary": "Internal exception processing event: %r" % exception, + "message": "Internal exception processing event: %r/%s" + % (exception, to_dict(orig_zep_event.event)), + "severity": 4, } zep_raw_event = ZepRawEvent() zep_raw_event.event.CopyFrom(from_dict(Event, failure_event)) event_context = EventContext(log, zep_raw_event) - event_context.eventProxy.device = 'zeneventd' - event_context.eventProxy.component = 'processMessage' + event_context.eventProxy.device = "zeneventd" + event_context.eventProxy.component = "processMessage" return event_context @@ -231,18 +255,19 @@ class BaseQueueConsumerTask(object): def __init__(self, processor): self.processor = processor self._queueSchema = getUtility(IQueueSchema) - self.dest_routing_key_prefix = 'zenoss.zenevent' + self.dest_routing_key_prefix = "zenoss.zenevent" self._dest_exchange = self._queueSchema.getExchange( EXCHANGE_ZEP_ZEN_EVENTS ) def _routing_key(self, event): - return (self.dest_routing_key_prefix + - event.event.event_class.replace('/', '.').lower()) + return ( + self.dest_routing_key_prefix + + event.event.event_class.replace("/", ".").lower() + ) class TwistedQueueConsumerTask(BaseQueueConsumerTask): - def __init__(self, processor): BaseQueueConsumerTask.__init__(self, processor) self.queue = self._queueSchema.getQueue(QUEUE_RAW_ZEN_EVENTS) @@ -260,16 +285,20 @@ def processMessage(self, message): if log.isEnabledFor(logging.DEBUG): # assume to_dict() is expensive. log.debug("Publishing event: %s", to_dict(zepRawEvent)) - yield self.queueConsumer.publishMessage(EXCHANGE_ZEP_ZEN_EVENTS, - self._routing_key(zepRawEvent), zepRawEvent, declareExchange=False) + yield self.queueConsumer.publishMessage( + EXCHANGE_ZEP_ZEN_EVENTS, + self._routing_key(zepRawEvent), + zepRawEvent, + declareExchange=False, + ) yield self.queueConsumer.acknowledge(message) except DropEvent as e: if log.isEnabledFor(logging.DEBUG): # assume to_dict() is expensive. 
- log.debug('%s - %s', e.message, to_dict(e.event)) + log.debug("%s - %s", e.message, to_dict(e.event)) yield self.queueConsumer.acknowledge(message) except ProcessingException as e: - log.error('%s - %s', e.message, to_dict(e.event)) + log.error("%s - %s", e.message, to_dict(e.event)) log.exception(e) yield self.queueConsumer.reject(message) except Exception as e: @@ -282,7 +311,9 @@ def __init__(self, dmd): super(EventDTwistedWorker, self).__init__() self._amqpConnectionInfo = getUtility(IAMQPConnectionInfo) self._queueSchema = getUtility(IQueueSchema) - self._consumer_task = TwistedQueueConsumerTask(EventPipelineProcessor(dmd)) + self._consumer_task = TwistedQueueConsumerTask( + EventPipelineProcessor(dmd) + ) self._consumer = QueueConsumer(self._consumer_task, dmd) def run(self): @@ -290,7 +321,7 @@ def run(self): reactor.run() def _start(self): - reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) + reactor.addSystemEventTrigger("before", "shutdown", self._shutdown) self._consumer.run() @defer.inlineCallbacks @@ -309,20 +340,21 @@ def getConfig(self): class ZenEventD(ZCmdBase): - def __init__(self, *args, **kwargs): super(ZenEventD, self).__init__(*args, **kwargs) EventPipelineProcessor.SYNC_EVERY_EVENT = self.options.syncEveryEvent - EventPipelineProcessor.PROCESS_EVENT_TIMEOUT = self.options.process_event_timeout + EventPipelineProcessor.PROCESS_EVENT_TIMEOUT = ( + self.options.process_event_timeout + ) self._heartbeatSender = QueueHeartbeatSender( - 'localhost', 'zeneventd', self.options.heartbeatTimeout + "localhost", "zeneventd", self.options.heartbeatTimeout ) self._maintenanceCycle = MaintenanceCycle( self.options.maintenancecycle, self._heartbeatSender ) objectEventNotify(DaemonCreatedEvent(self)) config = ZenEventDConfig(self.options) - provideUtility(config, IDaemonConfig, 'zeneventd_config') + provideUtility(config, IDaemonConfig, "zeneventd_config") def sigTerm(self, signum=None, frame=None): log.info("Shutting down...") @@ -337,52 +369,69 @@ def run(self): def sighandler_USR1(self, signum, frame): super(ZenEventD, self).sighandler_USR1(signum, frame) - log.debug('sighandler_USR1 called %s', signum) + log.debug("sighandler_USR1 called %s", signum) objectEventNotify(SigUsr1Event(self, signum)) def buildOptions(self): super(ZenEventD, self).buildOptions() maintenanceBuildOptions(self.parser) self.parser.add_option( - '--synceveryevent', dest='syncEveryEvent', - action="store_true", default=False, - help=('Force sync() before processing every event; default is' - ' to sync() no more often than once every 1/2 second.') + "--synceveryevent", + dest="syncEveryEvent", + action="store_true", + default=False, + help=( + "Force sync() before processing every event; default is" + " to sync() no more often than once every 1/2 second." + ), ) self.parser.add_option( - '--process-event-timeout', dest='process_event_timeout', - type='int', default=0, - help=('Set the Timeout(in seconds) for processing each event.' - ' The timeout may be extended for a transforms using,' - 'signal.alarm() in the transform' - 'set to 0 to disable') + "--process-event-timeout", + dest="process_event_timeout", + type="int", + default=0, + help=( + "Set the Timeout(in seconds) for processing each event." 
+ " The timeout may be extended for a transforms using," + "signal.alarm() in the transform" + "set to 0 to disable" + ), ) self.parser.add_option( - '--messagesperworker', dest='messagesPerWorker', default=1, + "--messagesperworker", + dest="messagesPerWorker", + default=1, type="int", - help=('Sets the number of messages each worker gets from the queue' - ' at any given time. Default is 1. Change this only if event' - ' processing is deemed slow. Note that increasing the value' - ' increases the probability that events will be processed' - ' out of order.') + help=( + "Sets the number of messages each worker gets from the queue" + " at any given time. Default is 1. Change this only if event" + " processing is deemed slow. Note that increasing the value" + " increases the probability that events will be processed" + " out of order." + ), ) self.parser.add_option( - '--maxpickle', dest='maxpickle', default=100, type="int", - help=('Sets the number of pickle files in' - ' var/zeneventd/failed_transformed_events.') + "--maxpickle", + dest="maxpickle", + default=100, + type="int", + help=( + "Sets the number of pickle files in" + " var/zeneventd/failed_transformed_events." + ), ) self.parser.add_option( - '--pickledir', dest='pickledir', - default=zenPath('var/zeneventd/failed_transformed_events'), + "--pickledir", + dest="pickledir", + default=zenPath("var/zeneventd/failed_transformed_events"), type="string", - help='Sets the path to save pickle files.' + help="Sets the path to save pickle files.", ) objectEventNotify(BuildOptionsEvent(self)) class Timeout: - - def __init__(self, event, seconds=1, error_message='Timeout'): + def __init__(self, event, seconds=1, error_message="Timeout"): self.seconds = seconds self.error_message = error_message self.event = event @@ -400,14 +449,14 @@ def __exit__(self, type, value, traceback): class TimeoutError(Exception): - def __init__(self, message, event=None): super(TimeoutError, self).__init__(message) self.event = event -if __name__ == '__main__': +if __name__ == "__main__": # explicit import of ZenEventD to activate enterprise extensions - from Products.ZenEvents.zeneventd import ZenEventD + from Products.ZenEvents.zeneventd import ZenEventD # noqa F811 + zed = ZenEventD() zed.run() diff --git a/Products/ZenEvents/zeneventmigrate.py b/Products/ZenEvents/zeneventmigrate.py deleted file mode 100644 index 1be669772b..0000000000 --- a/Products/ZenEvents/zeneventmigrate.py +++ /dev/null @@ -1,694 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2011, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -""" -Script used to migrate events from a Zenoss 3.1.x events database into the -new ZEP event schema. All properties of the events are mapped to the new -property values, and zeneventd identification/tagging is performed to ensure -that events will be associated with the correct entities in Zenoss. - -The migration script assumes that the old MySQL database no longer accepts -events, and saves the last event (per table) inside of the -zeneventmigrate.conf file. Note that only the MySQL server is required to be -available, and not all of Zenoss. 
- -On the 3x source system (assuming separate 3x and 4x systems), ensure that -the 4x system (in this example: 10.87.207.181) can acces the database. Create -a new user to access the events. - -mysql> grant SELECT on *.* to 'event_migrate'@'10.87.207.181' identified by 'password'; -mysql> flush privileges; - -From the remote machine, test the access to the 3x database, which (in this example -resides on 10.87.207.80. - -zends -uevent_migrate -ppassword -h 10.87.207.80 --port 3306 -D events - -From the 4.x system, you can then start the migration: - -zeneventmigrate --evthost=10.87.207.80 --evtport=3306 \ - --evtuser=event_migrate --evtpass=password --dont-fetch-args -""" - -import logging -import os -import sys -from time import mktime -from ConfigParser import ConfigParser, NoOptionError -from copy import deepcopy -from itertools import imap -from uuid import uuid4 -from signal import signal, siginterrupt, SIGTERM, SIGINT -from time import sleep - - -from Products.ZenUtils.mysql import MySQLdb -from MySQLdb import connect -from MySQLdb.cursors import DictCursor -from _mysql import escape_string - -from zenoss.protocols.protobufs.zep_pb2 import (EventSummary, ZepRawEvent, STATUS_NEW, STATUS_ACKNOWLEDGED, - STATUS_SUPPRESSED, STATUS_CLOSED, STATUS_CLEARED, - SYSLOG_PRIORITY_EMERG, SYSLOG_PRIORITY_DEBUG) -from zenoss.protocols.protobufs.model_pb2 import DEVICE, COMPONENT -from Products.ZenEvents.syslog_h import fac_values, LOG_FAC -from Products.ZenUtils.AmqpDataManager import AmqpTransaction -from Products.ZenUtils.ZenScriptBase import ZenScriptBase -from Products.ZenUtils.Utils import zenPath -from Products.ZenUtils.guid.interfaces import IGlobalIdentifier -from zope.component import getUtility -from Products.ZenMessaging.queuemessaging.interfaces import IQueuePublisher -from Products.ZenMessaging.queuemessaging.adapters import EventProtobufSeverityMapper -from Products.ZenEvents.events2.processing import EventProxy -from Products.ZenEvents.events2.processing import (Manager, EventContext, IdentifierPipe, AddDeviceContextAndTagsPipe, - AssignDefaultEventClassAndTagPipe) -from Products.ZenModel.DeviceClass import DeviceClass -from Products.ZenModel.DeviceGroup import DeviceGroup -from Products.ZenModel.Location import Location -from Products.ZenModel.System import System - -log = logging.getLogger('zen.EventMigrate') - -class MappingEventContext(object): - """ - Contains the event summary information to be published to the migrated - events queue. - """ - def __init__(self, event_dict): - self._event_dict = event_dict - self._summary = EventSummary() - self._occurrence = self._summary.occurrence.add() - self._actor = self._occurrence.actor - - @property - def event_dict(self): - return self._event_dict - - @property - def summary(self): - return self._summary - - @property - def occurrence(self): - return self._occurrence - - @property - def actor(self): - return self._actor - - def __str__(self): - return str(self._summary) - -def _user_uuid(dmd, userName): - # We have to call _getOb instead of getUserSettings here because the - # latter will create a new user settings object even if the user is - # not known. - try: - user = dmd.ZenUsers._getOb(userName) - return IGlobalIdentifier(user).getGUID() - except Exception: - if log.isEnabledFor(logging.DEBUG): - log.exception("Failed to look up user UUID for %s", userName) - -def _convert_summary(new_name, conversion_fcn = None): - """ - Returns a function to convert a value from a previous event into - its equivalent value in the EventSummary. 
- """ - def _convert_summary_internal(value, event_ctx): - if conversion_fcn: - value = conversion_fcn(value) - if value is not None: - setattr(event_ctx.summary, new_name, value) - return _convert_summary_internal - -def _convert_occurrence(new_name, conversion_fcn = None): - """ - Returns a function to convert a value from a previous event into - its equivalent value in the Event occurrence. - """ - def _convert_occurrence_internal(value, event_ctx): - if conversion_fcn: - value = conversion_fcn(value) - if value is not None: - setattr(event_ctx.occurrence, new_name, value) - return _convert_occurrence_internal - -def _add_detail(new_name, conversion_fcn = None): - """ - Returns a function to convert a value from a previous event into - its equivalent EventDetail within the event occurrence. - """ - def _add_detail_internal(value, event_ctx): - if conversion_fcn: - value = conversion_fcn(value) - if value is not None: - detail = event_ctx.occurrence.details.add() - detail.name = new_name - if not hasattr(value, '__iter__'): - value = (str(value),) - else: - value = map(str, value) - detail.value.extend(value) - return _add_detail_internal - -def _add_details(value, event_ctx): - """ - Converts event details from the detail table to EventDetail objects - on the event occurrence. - """ - for detail_name, detail_value in value.iteritems(): - detail = event_ctx.occurrence.details.add() - detail.name = detail_name - detail.value.append(detail_value) - -_AUDIT_LOG_CONVERSIONS = { - 'event state changed to acknowledged': STATUS_ACKNOWLEDGED, - 'deleted by user': STATUS_CLOSED, -} - -def _add_logs(dmd): - """ - Converts event logs from the log table to either AuditLog or - EventNote objects on the event summary depending on whether - the log message matches system generated values. - """ - def _add_logs_internal(value, event_ctx): - for log_row in value: - username = log_row['userName'] - useruuid = _user_uuid(dmd, username) - text = log_row['text'] - ctime = _convert_ts_to_millis(log_row['ctime']) - - audit_state = _AUDIT_LOG_CONVERSIONS.get(text.lower()) - if audit_state: - log = event_ctx.summary.audit_log.add(timestamp=ctime, - new_status=audit_state, - user_name=username) - if useruuid: - log.user_uuid = useruuid - else: - note = event_ctx.summary.notes.add(uuid=str(uuid4()), - user_name=username, - created_time=ctime, - message=text) - if useruuid: - note.user_uuid = useruuid - - return _add_logs_internal - -def _convert_actor(sub_type): - """ - Returns a function to convert a value from a previous event into - its equivalent value in the EventActor within the event occurrence. - """ - def _convert_actor_internal(value, event_ctx): - if value: - actor = event_ctx.actor - if not sub_type: - actor.element_type_id = DEVICE - actor.element_identifier = value - else: - actor.element_sub_type_id = COMPONENT - actor.element_sub_identifier = value - return _convert_actor_internal - -def _convert_severity(value): - return EventProtobufSeverityMapper.SEVERITIES[str(value).upper()] - -def _convert_pipe_delimited(value): - if value: - values = [val for val in value.split('|') if val] - return values if values else None - -_STATE_CONVERSIONS = { - 0: STATUS_NEW, - 1: STATUS_ACKNOWLEDGED, - 2: STATUS_SUPPRESSED, -} - -def _convert_state(status): - """ - Converts an event state from a previous event into the equivalent new - state. Events migrated from history get a status of STATUS_CLOSED or - STATUS_CLEARED depending on the presence of the clearid field. 
- """ - def _convert_state_internal(value, event_ctx): - if status: - event_ctx.summary.status = _STATE_CONVERSIONS.get(value, STATUS_NEW) - else: - event_ctx.summary.status = STATUS_CLEARED if event_ctx.event_dict.get('clearid','') else STATUS_CLOSED - - return _convert_state_internal - -def _convert_ts_to_millis(value): - return int(mktime(value.timetuple()) * 1000) - -def _convert_double_to_millis(value): - return int(value * 1000) - -def _drop_empty(value): - return value if value else None - -_FACILITY_CONVERSIONS = dict((k,LOG_FAC(v)) for k, v in fac_values.iteritems() if k not in ('facmask','nfacilities')) - -def _convert_facility(value): - """ - Converts a syslog facility from the old string format to the new - numeric format. This was changed because all systems don't use the - same mapping for syslog facilities and using a numeric facility - ensures we don't lose data from the original syslog event. - """ - if value and value in _FACILITY_CONVERSIONS: - return _FACILITY_CONVERSIONS[value] - -def _convert_priority(value): - if value >= SYSLOG_PRIORITY_EMERG and value <= SYSLOG_PRIORITY_DEBUG: - return value - -def _convert_event_class_mapping_uuid(dmd): - """ - Converts an event class mapping to the UUID of the event class - mapping. - """ - failed_mappings = set() - - def _convert_event_class_mapping_uuid_internal(value): - if value: - try: - value = value.encode('ascii') - components = value.split('/') - components.insert(-1, 'instances') - eventClass = dmd.unrestrictedTraverse('/zport/dmd/Events' + '/'.join(components)) - return IGlobalIdentifier(eventClass).getGUID() - except Exception: - if value not in failed_mappings: - failed_mappings.add(value) - if log.isEnabledFor(logging.DEBUG): - log.exception("Failed to resolve event class mapping: %s", value) - else: - log.warning('Failed to resolve event class mapping: %s', value) - return _convert_event_class_mapping_uuid_internal - -def _convert_ownerid(dmd): - def _convert_ownerid_internal(value, event_ctx): - if value: - event_ctx.summary.current_user_name = value - useruuid = _user_uuid(dmd, value) - if useruuid: - event_ctx.summary.current_user_uuid = useruuid - - return _convert_ownerid_internal - -class EventConverter(object): - """ - Utility class used to convert an old-style event from the status or - history table into the equivalent EventSummary protobuf. Other mappers - exist for converting old style events to event occurrences, but this - needs to preserve information that is stored in the event summary (i.e. - count, event notes, audit logs). 
- """ - - _FIELD_MAPPERS = { - 'evid': _convert_summary('uuid'), - 'dedupid': _convert_occurrence('fingerprint'), - 'device': _convert_actor(False), - 'component': _convert_actor(True), - 'eventClass': _convert_occurrence('event_class'), - 'eventKey': _convert_occurrence('event_key', _drop_empty), - 'summary': _convert_occurrence('summary'), - 'message': _convert_occurrence('message'), - 'severity': _convert_occurrence('severity', _convert_severity), - 'eventClassKey': _convert_occurrence('event_class_key', _drop_empty), - 'eventGroup': _convert_occurrence('event_group', _drop_empty), - 'stateChange': _convert_summary('status_change_time', _convert_ts_to_millis), - 'firstTime': _convert_summary('first_seen_time', _convert_double_to_millis), - 'lastTime': _convert_summary('last_seen_time', _convert_double_to_millis), - 'count': _convert_summary('count'), - 'prodState': _add_detail(EventProxy.PRODUCTION_STATE_DETAIL_KEY), - # This doesn't have an equivalent value in new schema - just add as detail - 'suppid': _add_detail('suppid', _drop_empty), - # Deprecated - 'manager': _add_detail('manager', _drop_empty), - 'agent': _convert_occurrence('agent', _drop_empty), - 'DeviceClass': _add_detail(EventProxy.DEVICE_CLASS_DETAIL_KEY, _drop_empty), - 'Location': _add_detail(EventProxy.DEVICE_LOCATION_DETAIL_KEY, _drop_empty), - 'Systems': _add_detail(EventProxy.DEVICE_SYSTEMS_DETAIL_KEY, _convert_pipe_delimited), - 'DeviceGroups': _add_detail(EventProxy.DEVICE_GROUPS_DETAIL_KEY, _convert_pipe_delimited), - 'ipAddress': _add_detail(EventProxy.DEVICE_IP_ADDRESS_DETAIL_KEY, _drop_empty), - 'facility': _convert_occurrence('syslog_facility', _convert_facility), - 'priority': _convert_occurrence('syslog_priority', _convert_priority), - 'ntevid': _convert_occurrence('nt_event_code', _drop_empty), - 'clearid': _convert_summary('cleared_by_event_uuid', _drop_empty), - 'DevicePriority': _add_detail(EventProxy.DEVICE_PRIORITY_DETAIL_KEY), - 'monitor': _convert_occurrence('monitor', _drop_empty), - 'deletedTime': _convert_summary('status_change_time', _convert_ts_to_millis), - 'details': _add_details, - } - - def __init__(self, dmd, status): - self.dmd = dmd - self.status = status - # Most of these can be shared above - a few require DMD access - self.field_mappers = dict(EventConverter._FIELD_MAPPERS) - self.field_mappers['ownerid'] = _convert_ownerid(dmd) - self.field_mappers['eventState'] = _convert_state(status) - self.field_mappers['eventClassMapping'] = _convert_occurrence('event_class_mapping_uuid', - _convert_event_class_mapping_uuid(dmd)) - self.field_mappers['logs'] = _add_logs(dmd) - - def convert(self, event_dict): - event_ctx = MappingEventContext(event_dict) - for name, value in event_dict.iteritems(): - if name in self.field_mappers: - self.field_mappers[name](value, event_ctx) - else: - _add_detail(name)(value, event_ctx) - return event_ctx - -_IN_CLAUSE = lambda evids: ','.join("'%s'" % evid for evid in evids) - -class ShutdownException(Exception): - pass - -class ZenEventMigrate(ZenScriptBase): - def __init__(self, noopts=0, app=None, connect=True): - super(ZenEventMigrate, self).__init__(noopts=noopts, app=app, connect=connect) - self.config_filename = zenPath('etc/zeneventmigrate.conf') - self.config_section = 'zeneventmigrate' - self._shutdown = False - - def buildOptions(self): - super(ZenEventMigrate, self).buildOptions() - self.parser.add_option('--dont-fetch-args', dest='fetchArgs', default=True, action='store_false', - help='By default MySQL connection information' - ' is retrieved from 
Zenoss if not' - ' specified and if Zenoss is available.' - ' This disables fetching of these values' - ' from Zenoss.') - self.parser.add_option('--evthost', dest='evthost', default='127.0.0.1', - help='Events database hostname (Default: %default)') - self.parser.add_option('--evtport', dest='evtport', action='store', type='int', default=3306, - help='Port used to connect to the events database (Default: %default)') - self.parser.add_option('--evtuser', dest='evtuser', default=None, - help='Username used to connect to the events database') - self.parser.add_option('--evtpass', dest='evtpass', default=None, - help='Password used to connect to the events database') - self.parser.add_option('--evtdb', dest='evtdb', default='events', - help='Name of events database (Default: %default)') - self.parser.add_option('--batchsize', dest='batchsize', action='store', type='int', default=100, - help='Number of events to process in one batch (Default: %default)') - self.parser.add_option('--sleep', dest='sleep', action='store', type='int', default=0, - help='Number of seconds to wait after migrating a batch of events (Default: %default)') - self.parser.add_option('--restart', dest='restart', action='store_true', default=False, - help='Use this flag to start a new migration process (disables resuming a previous ' - 'migration).') - - def _output(self, message): - if sys.stdout.isatty(): - print message - else: - log.info(message) - - def _progress(self, message): - if sys.stdout.isatty(): - sys.stdout.write("\r" + message) - sys.stdout.flush() - else: - log.info(message) - - def _loadConfig(self): - self.config = ConfigParser() - self.config.read(self.config_filename) - if not self.config.has_section(self.config_section): - self.config.add_section(self.config_section) - - def _storeConfig(self): - with open(self.config_filename, 'wb') as configfile: - self.config.write(configfile) - - def _getConfig(self, option, default=None): - try: - return self.config.get(self.config_section, option) - except NoOptionError: - return default - - def _setConfig(self, option, value): - self.config.set(self.config_section, option, value) - - def _execQuery(self, conn, sql, args=None): - cursor = None - try: - cursor = conn.cursor() - cursor.execute(sql, args) - rows = cursor.fetchall() - return rows - finally: - if cursor: - cursor.close() - - def _countQuery(self, conn, sql, args=None): - cursor = None - try: - cursor = conn.cursor() - cursor.execute(sql, args) - rows = cursor.fetchall() - cursor.execute("SELECT FOUND_ROWS() AS num_rows") - count = cursor.fetchone()['num_rows'] - return rows, count - finally: - if cursor: - cursor.close() - - def _add_details(self, conn, evids, events_by_evid): - """ - Queries the database for event details for all of the events with the specified - event ids. Each returned detail is added to the event dictionary for the event - in events_by_evid. - """ - query = "SELECT evid, name, value FROM detail WHERE evid IN (%s)" % _IN_CLAUSE(evids) - rows = self._execQuery(conn, query) - for row in rows: - evid = row['evid'] - event = events_by_evid[evid] - if not 'details' in event: - event['details'] = {} - event['details'][row['name']] = row['value'] - - def _add_logs(self, conn, evids, events_by_evid): - """ - Queries the database for event logs for all of the events with the specified - event ids. Each returned log is added to the event dictionary for the event - in events_by_evid. 
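_add_details and _add_logs share one shape: pull every child row for a whole batch of evids with a single IN (...) query, then hang each row off its parent event dict keyed by evid, so the converter later sees details and logs as part of the event. On its own, the grouping step looks roughly like the sketch below; attach_child_rows and the hard-coded rows are illustrative, not the script's code.

def attach_child_rows(events_by_evid, rows, key="details"):
    """Group child-table rows onto their parent event dicts by evid."""
    for row in rows:
        event = events_by_evid[row["evid"]]
        event.setdefault(key, {})[row["name"]] = row["value"]

events_by_evid = {
    "e1": {"evid": "e1", "summary": "first event"},
    "e2": {"evid": "e2", "summary": "second event"},
}
detail_rows = [
    {"evid": "e1", "name": "priority", "value": "3"},
    {"evid": "e1", "name": "manager", "value": "zenoss41"},
    {"evid": "e2", "name": "priority", "value": "5"},
]
attach_child_rows(events_by_evid, detail_rows)
print(events_by_evid["e1"]["details"])  # {'priority': '3', 'manager': 'zenoss41'}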
- """ - query = "SELECT * FROM log WHERE evid IN (%s)" % _IN_CLAUSE(evids) - rows = self._execQuery(conn, query) - for row in rows: - evid = row.pop('evid') - event = events_by_evid[evid] - if not 'logs' in event: - event['logs'] = [] - event['logs'].append(row) - - def _page_rows(self, conn, status=True): - """ - Pages through rows in the database in either the status or history - table. After returning a batch of rows, the location of the last - processed event is persisted to disk to ensure we resume from the - right location in case the process is aborted for any reason. - """ - table = 'status' if status else 'history' - - offset = 0 - last_evid = self._getConfig('%s_last_evid' % table) - where = "WHERE evid > '%s'" % escape_string(last_evid) if last_evid else '' - - if last_evid: - num_rows_query = "SELECT SQL_CALC_FOUND_ROWS evid FROM %s %s LIMIT 0" % (table, where) - num_rows = self._countQuery(conn, num_rows_query)[1] - else: - num_rows_query = "SELECT COUNT(*) AS num_rows FROM %s" % table - num_rows = self._execQuery(conn, num_rows_query)[0]['num_rows'] - - if not num_rows: - self._output("No events to migrate from %s" % table) - return - - query = "SELECT * FROM %s %s ORDER BY evid LIMIT %%s OFFSET %%s" % (table, where) - rows = self._execQuery(conn, query, (self.options.batchsize, offset)) - while not self._shutdown and rows: - self._progress("Processing events in %s: [%d/%d]" % (table, offset, num_rows)) - evids = [] - events_by_evid = {} - for row in rows: - evid = row['evid'] - events_by_evid[evid] = row - evids.append(evid) - self._add_details(conn, evids, events_by_evid) - self._add_logs(conn, evids, events_by_evid) - yield rows - self._setConfig('%s_last_evid' % table, rows[-1]['evid']) - self._storeConfig() - if self.options.sleep: - log.debug("Pausing event migration for %s seconds", self.options.sleep) - sleep(self.options.sleep) - offset += self.options.batchsize - rows = self._execQuery(conn, query, (self.options.batchsize, offset)) - - if not self._shutdown: - self._progress("Processing events in %s: [%d/%d]\n" % (table, num_rows, num_rows)) - - - def _event_to_zep_raw_event(self, event): - """ - Converts an event occurrence into a ZepRawEvent (required for running through - zeneventd pipes). - """ - zepRawEvent = ZepRawEvent() - zepRawEvent.event.CopyFrom(event) - return zepRawEvent - - def _merge_tags(self, zep_raw_event, event): - """ - Merges results from the identification and tagging pipes into the event - occurrence to be published. This will take the element_uuid, element_sub_uuid, titles - and tags from the ZEP raw event and copy them to the appropriate place on - the event occurrence. 
- """ - raw_actor = zep_raw_event.event.actor - event_actor = event.actor - for field in ('element_uuid', 'element_sub_uuid', 'element_title', 'element_sub_title'): - if raw_actor.HasField(field): - setattr(event_actor, field, getattr(raw_actor, field)) - event.tags.extend(imap(deepcopy, zep_raw_event.event.tags)) - - def _migrate_events(self, conn, publisher, status): - converter = EventConverter(self.dmd, status) - manager = Manager(self.dmd) - pipes = (IdentifierPipe(manager), AddDeviceContextAndTagsPipe(manager), - AssignDefaultEventClassAndTagPipe(manager)) - routing_key = 'zenoss.events.summary' if status else 'zenoss.events.archive' - - taggers = { - EventProxy.DEVICE_CLASS_DETAIL_KEY: (self.dmd.Devices, DeviceClass), - EventProxy.DEVICE_GROUPS_DETAIL_KEY: (self.dmd.Groups, DeviceGroup), - EventProxy.DEVICE_LOCATION_DETAIL_KEY: (self.dmd.Locations, Location), - EventProxy.DEVICE_SYSTEMS_DETAIL_KEY: (self.dmd.Systems, System), - } - - try: - for event_rows in self._page_rows(conn, status): - with AmqpTransaction(publisher.channel): - for mapping_event_context in imap(converter.convert, event_rows): - if self._shutdown: - raise ShutdownException() - occurrence = mapping_event_context.occurrence - zep_raw_event = self._event_to_zep_raw_event(occurrence) - event_ctx = EventContext(log, zep_raw_event) - for pipe in pipes: - pipe(event_ctx) - - # Clear tags for device class, location, systems, groups from current device - event_ctx.eventProxy.tags.clearType(AddDeviceContextAndTagsPipe.DEVICE_TAGGERS.keys()) - - # Resolve tags from original fields in the event - for detail in occurrence.details: - if detail.name in taggers: - organizer_root, organizer_cls = taggers[detail.name] - tags = set() - for val in detail.value: - try: - obj = organizer_root.unrestrictedTraverse(str(val[1:])) - if isinstance(obj, organizer_cls): - tags.update(manager.getUuidsOfPath(obj)) - except Exception: - if log.isEnabledFor(logging.DEBUG): - log.debug("Unable to resolve UUID for %s", val) - if tags: - event_tag = occurrence.tags.add() - event_tag.type = detail.name - event_tag.uuid.extend(tags) - - self._merge_tags(zep_raw_event, occurrence) - if log.isEnabledFor(logging.DEBUG): - log.debug("Migrated event: %s", mapping_event_context.summary) - - publisher.publish("$MigratedEvents", routing_key, mapping_event_context.summary, - createQueues=("$ZepMigratedEventSummary","$ZepMigratedEventArchive")) - except ShutdownException: - pass - - def _sigterm(self, signum=None, frame=None): - log.debug('SIGTERM signal caught') - self._shutdown = True - self._output('\nShutting down...') - - def run(self): - signal(SIGTERM, self._sigterm) - signal(SIGINT, self._sigterm) - # Try to avoid stacktraces from interrupted signal calls - siginterrupt(SIGTERM, False) - siginterrupt(SIGINT, False) - - if self.options.restart: - if os.path.exists(self.config_filename): - os.remove(self.config_filename) - - self._loadConfig() - if self.options.batchsize <= 0: - self.parser.error('Invalid argument for --batchsize parameter - must be positive') - if self.options.sleep < 0: - self.parser.error('Invalid argument for --sleep parameter') - - if not self.options.fetchArgs: - if not self.options.evtuser or self.options.evtpass is None: - self.parser.error('Required arguments --evtuser and --evtpass must be provided when using ' - '--dont-fetch-args') - else: - zem = self.dmd.ZenEventManager - self.options.evthost = zem.host - self.options.evtport = zem.port - self.options.evtuser = zem.username - self.options.evtpass = zem.password - 
self.options.evtdb = zem.database - conn = None - publisher = None - try: - conn = connect(host=self.options.evthost, - user=self.options.evtuser, - passwd=self.options.evtpass, - db=self.options.evtdb, - port=self.options.evtport, - cursorclass=DictCursor, - use_unicode=True) - conn.autocommit(1) - - publisher = getUtility(IQueuePublisher) - - # Migrate status - self._migrate_events(conn, publisher, True) - - # Migrate history - self._migrate_events(conn, publisher, False) - - except Exception as e: - if log.isEnabledFor(logging.DEBUG): - log.exception('Error migrating events') - print >>sys.stderr, "Failed to migrate events: %s" % e - finally: - if publisher: - publisher.close() - if conn: - conn.close() - - -if __name__ == '__main__': - migrate = ZenEventMigrate() - migrate.run() diff --git a/Products/ZenEvents/zensyslog.py b/Products/ZenEvents/zensyslog.py deleted file mode 100644 index 519afa89d3..0000000000 --- a/Products/ZenEvents/zensyslog.py +++ /dev/null @@ -1,363 +0,0 @@ -#! /usr/bin/env python -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2008, 2011, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -__doc__ = """zensyslog - -Turn syslog messages into events. - -""" - -import time -import socket -import os -import logging - -from twisted.internet.protocol import DatagramProtocol -from twisted.internet import reactor, defer, udp -from twisted.python import failure - -import zope.interface -import zope.component - - -from Products.ZenCollector.daemon import CollectorDaemon -from Products.ZenCollector.interfaces import ICollector, ICollectorPreferences,\ - IEventService, \ - IScheduledTask, IStatisticsService -from Products.ZenCollector.tasks import SimpleTaskFactory,\ - SimpleTaskSplitter,\ - BaseTask, TaskStates -from Products.ZenUtils.observable import ObservableMixin - -from Products.ZenEvents.SyslogProcessing import SyslogProcessor - -from Products.ZenUtils.Utils import zenPath -from Products.ZenUtils.IpUtil import asyncNameLookup - -from Products.ZenEvents.EventServer import Stats -from Products.ZenUtils.Utils import unused -from Products.ZenCollector.services.config import DeviceProxy -unused(DeviceProxy) - -COLLECTOR_NAME = 'zensyslog' -log = logging.getLogger("zen.%s" % COLLECTOR_NAME) - - -class SyslogPreferences(object): - zope.interface.implements(ICollectorPreferences) - - def __init__(self): - """ - Constructs a new PingCollectionPreferences instance and - provides default values for needed attributes. 
- """ - self.collectorName = COLLECTOR_NAME - self.configCycleInterval = 20 # minutes - self.cycleInterval = 5 * 60 # seconds - - # The configurationService attribute is the fully qualified class-name - # of our configuration service that runs within ZenHub - self.configurationService = 'Products.ZenHub.services.SyslogConfig' - - # Will be filled in based on buildOptions - self.options = None - - self.configCycleInterval = 20*60 - - def postStartupTasks(self): - task = SyslogTask(COLLECTOR_NAME, configId=COLLECTOR_NAME) - yield task - - def buildOptions(self, parser): - """ - Command-line options to be supported - """ - SYSLOG_PORT = 514 - try: - SYSLOG_PORT = socket.getservbyname('syslog', 'udp') - except socket.error: - pass - - parser.add_option('--parsehost', dest='parsehost', - action='store_true', default=False, - help='Try to parse the hostname part of a syslog HEADER' - ) - parser.add_option('--stats', dest='stats', - action='store_true', default=False, - help='Print statistics to log every 2 secs') - parser.add_option('--logorig', dest='logorig', - action='store_true', default=False, - help='Log the original message') - parser.add_option('--logformat', dest='logformat', - default='human', - help='Human-readable (/var/log/messages) or raw (wire)' - ) - parser.add_option('--minpriority', dest='minpriority', - default=6, type='int', - help='Minimum priority message that zensyslog will accept' - ) - parser.add_option('--syslogport', dest='syslogport', - default=SYSLOG_PORT, type='int', - help='Port number to use for syslog events' - ) - parser.add_option('--listenip', dest='listenip', - default='0.0.0.0', - help='IP address to listen on. Default is %default' - ) - parser.add_option('--useFileDescriptor', - dest='useFileDescriptor', type='int', - help='Read from an existing connection rather opening a new port.' - , default=None) - parser.add_option('--noreverseLookup', dest='noreverseLookup', - action='store_true', default=False, - help="Don't convert the remote device's IP address to a hostname." - ) - - def postStartup(self): - daemon = zope.component.getUtility(ICollector) - daemon.defaultPriority = 1 - - # add our collector's custom statistics - statService = zope.component.queryUtility(IStatisticsService) - statService.addStatistic("events", "COUNTER") - - -class SyslogTask(BaseTask, DatagramProtocol): - """ - Listen for syslog messages and turn them into events - Connects to the TrapService service in zenhub. 
- """ - zope.interface.implements(IScheduledTask) - - SYSLOG_DATE_FORMAT = '%b %d %H:%M:%S' - SAMPLE_DATE = 'Apr 10 15:19:22' - - def __init__(self, taskName, configId, - scheduleIntervalSeconds=3600, taskConfig=None): - BaseTask.__init__(self, taskName, configId, - scheduleIntervalSeconds, taskConfig) - self.log = log - - # Needed for interface - self.name = taskName - self.configId = configId - self.state = TaskStates.STATE_IDLE - self.interval = scheduleIntervalSeconds - self._preferences = taskConfig - self._daemon = zope.component.getUtility(ICollector) - self._eventService = zope.component.queryUtility(IEventService) - self._statService = zope.component.queryUtility(IStatisticsService) - self._preferences = self._daemon - - self.options = self._daemon.options - - self.stats = Stats() - - if not self.options.useFileDescriptor\ - and self.options.syslogport < 1024: - self._daemon.openPrivilegedPort('--listen', '--proto=udp', - '--port=%s:%d' - % (self.options.listenip, - self.options.syslogport)) - self._daemon.changeUser() - self.minpriority = self.options.minpriority - self.processor = None - - if self.options.logorig: - self.olog = logging.getLogger('origsyslog') - self.olog.setLevel(20) - self.olog.propagate = False - lname = zenPath('log/origsyslog.log') - hdlr = logging.FileHandler(lname) - hdlr.setFormatter(logging.Formatter('%(message)s')) - self.olog.addHandler(hdlr) - - if self.options.useFileDescriptor is not None: - self.useUdpFileDescriptor(int(self.options.useFileDescriptor)) - else: - reactor.listenUDP(self.options.syslogport, self, - interface=self.options.listenip) - - # yield self.model().callRemote('getDefaultPriority') - self.processor = SyslogProcessor(self._eventService.sendEvent, - self.options.minpriority, self.options.parsehost, - self.options.monitor, self._daemon.defaultPriority) - - def doTask(self): - """ - This is a wait-around task since we really are called - asynchronously. - """ - return defer.succeed("Waiting for syslog messages...") - - def useUdpFileDescriptor(self, fd): - s = socket.fromfd(fd, socket.AF_INET, socket.SOCK_DGRAM) - os.close(fd) - port = s.getsockname()[1] - transport = udp.Port(port, self) - s.setblocking(0) - transport.socket = s - transport.fileno = s.fileno - transport.connected = 1 - transport._realPortNumber = port - self.transport = transport - # hack around startListening not being called - self.numPorts = 1 - transport.startReading() - - def expand(self, msg, client_address): - """ - Expands a syslog message into a string format suitable for writing - to the filesystem such that it appears the same as it would - had the message been logged by the syslog daemon. - - @param msg: syslog message - @type msg: string - @param client_address: IP info of the remote device (ipaddr, port) - @type client_address: tuple of (string, number) - @return: message - @rtype: string - """ - # pri := facility * severity - stop = msg.find('>') - - # check for a datestamp. default to right now if date not present - start = stop + 1 - stop = start + len(SyslogTask.SAMPLE_DATE) - dateField = msg[start:stop] - try: - date = time.strptime(dateField, - SyslogTask.SYSLOG_DATE_FORMAT) - year = time.localtime()[0] - date = (year, ) + date[1:] - start = stop + 1 - except ValueError: - - # date not present, so use today's date - date = time.localtime() - - # check for a hostname. 
default to localhost if not present - stop = msg.find(' ', start) - if msg[stop - 1] == ':': - hostname = client_address[0] - else: - hostname = msg[start:stop] - start = stop + 1 - - # the message content - body = msg[start:] - - # assemble the message - prettyTime = time.strftime(SyslogTask.SYSLOG_DATE_FORMAT, date) - message = '%s %s %s' % (prettyTime, hostname, body) - return message - - def datagramReceived(self, msg, client_address): - """ - Consume the network packet - - @param msg: syslog message - @type msg: string - @param client_address: IP info of the remote device (ipaddr, port) - @type client_address: tuple of (string, number) - """ - if msg == "": - self.log.debug("Received empty datagram. Discarding.") - return - (ipaddr, port) = client_address - if self.options.logorig: - if self.options.logformat == 'human': - message = self.expand(msg, client_address) - else: - message = msg - self.olog.info(message) - - if self.options.noreverseLookup: - d = defer.succeed(ipaddr) - else: - d = asyncNameLookup(ipaddr) - d.addBoth(self.gotHostname, (msg, ipaddr, time.time())) - - def gotHostname(self, response, data): - """ - Send the resolved address, if possible, and the event via the thread - - @param response: Twisted response - @type response: Twisted response - @param data: (msg, ipaddr, rtime) - @type data: tuple of (string, string, datetime object) - """ - (msg, ipaddr, rtime) = data - if isinstance(response, failure.Failure): - host = ipaddr - else: - host = response - if self.processor: - self.processor.process(msg, ipaddr, host, rtime) - totalTime, totalEvents, maxTime = self.stats.report() - stat = self._statService.getStatistic("events") - stat.value = totalEvents - - def displayStatistics(self): - totalTime, totalEvents, maxTime = self.stats.report() - display = "%d events processed in %.2f seconds" % ( - totalEvents, - totalTime) - if totalEvents > 0: - display += """ -%.5f average seconds per event -Maximum processing time for one event was %.5f""" % ( - (totalTime / totalEvents), maxTime) - return display - - def cleanup(self): - status = self.displayStatistics() - self.log.info(status) - - -class SyslogConfigTask(ObservableMixin): - """ - Receive a configuration object containing the default priority - """ - zope.interface.implements(IScheduledTask) - - def __init__(self, taskName, configId, - scheduleIntervalSeconds=3600, taskConfig=None): - super(SyslogConfigTask, self).__init__() - - # Needed for ZCA interface contract - self.name = taskName - self.configId = configId - self.state = TaskStates.STATE_IDLE - self.interval = scheduleIntervalSeconds - self._preferences = taskConfig - self._daemon = zope.component.getUtility(ICollector) - - self._daemon.defaultPriority = self._preferences.defaultPriority - - def doTask(self): - return defer.succeed("Already updated default syslog priority...") - - def cleanup(self): - pass - - -class SyslogDaemon(CollectorDaemon): - - _frameworkFactoryName = "nosip" - - -if __name__=='__main__': - myPreferences = SyslogPreferences() - myTaskFactory = SimpleTaskFactory(SyslogConfigTask) - myTaskSplitter = SimpleTaskSplitter(myTaskFactory) - daemon = SyslogDaemon(myPreferences, myTaskSplitter) - daemon.run() diff --git a/Products/ZenEvents/zensyslog/__init__.py b/Products/ZenEvents/zensyslog/__init__.py new file mode 100644 index 0000000000..654b7f5ef6 --- /dev/null +++ b/Products/ZenEvents/zensyslog/__init__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 
2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + + +def main(): + from .daemon import SyslogDaemon + SyslogDaemon().run() diff --git a/Products/ZenEvents/zensyslog/__main__.py b/Products/ZenEvents/zensyslog/__main__.py new file mode 100644 index 0000000000..8ff54492bd --- /dev/null +++ b/Products/ZenEvents/zensyslog/__main__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +if __name__ == "__main__": + from Products.ZenEvents.zensyslog import main + + main() diff --git a/Products/ZenEvents/zensyslog/config.py b/Products/ZenEvents/zensyslog/config.py new file mode 100644 index 0000000000..36505f58eb --- /dev/null +++ b/Products/ZenEvents/zensyslog/config.py @@ -0,0 +1,63 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2008, 2011, 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +from twisted.spread import pb + + +class ConfigChecksums(pb.Copyable, pb.RemoteCopy): + """ + Object for requesting zensyslog config data. + + Each field is a token returned from zenhub. For the first request, + the fields should be None. + """ + + __slots__ = ("priority", "parsers", "use_summary", "rules") + + def __init__( + self, priority=None, parsers=None, use_summary=None, rules=None + ): + self.priority = priority + self.parsers = parsers + self.use_summary = use_summary + self.rules = rules + + def __repr__(self): + return "{}({})".format( + self.__class__.__name__, + ", ".join( + "{}={}".format(name, getattr(self, name)) + for name in self.__slots__ + ), + ) + + +pb.setUnjellyableForClass(ConfigChecksums, ConfigChecksums) + + +class ConfigUpdates(pb.Copyable, pb.RemoteCopy): + """ + Configuration for zensyslog. + """ + + __slots__ = ("priority", "parsers", "use_summary", "rules", "checksums") + + def __init__( + self, priority=None, parsers=None, use_summary=None, rules=None + ): + self.priority = priority + self.parsers = parsers + self.use_summary = use_summary + self.rules = rules + self.checksums = ConfigChecksums() + + +pb.setUnjellyableForClass(ConfigUpdates, ConfigUpdates) diff --git a/Products/ZenEvents/zensyslog/daemon.py b/Products/ZenEvents/zensyslog/daemon.py new file mode 100644 index 0000000000..cfd58bd11e --- /dev/null +++ b/Products/ZenEvents/zensyslog/daemon.py @@ -0,0 +1,314 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2008, 2011, 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
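ConfigChecksums and ConfigUpdates in the new zensyslog/config.py are plain value objects that inherit from both pb.Copyable and pb.RemoteCopy and are registered with pb.setUnjellyableForClass, which is what allows zenhub and the daemon to exchange them by value over Perspective Broker. The sketch below repeats that registration pattern on an invented ExampleConfig class; only the shape is taken from the module.

from twisted.spread import pb

class ExampleConfig(pb.Copyable, pb.RemoteCopy):
    """Example value object that PB can copy between processes by value."""

    def __init__(self, priority=None, parsers=None):
        self.priority = priority
        self.parsers = parsers

    def __repr__(self):
        return "ExampleConfig(priority=%r, parsers=%r)" % (
            self.priority, self.parsers)

# Without this registration the receiving end refuses to rebuild the class;
# registering it as its own RemoteCopy means both sides get the same type.
pb.setUnjellyableForClass(ExampleConfig, ExampleConfig)

print(ExampleConfig(priority=6, parsers="abc123"))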
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import socket +import sys + +from twisted.internet import defer, reactor +from twisted.internet.task import LoopingCall +from zope.component import provideUtility + +from Products.ZenCollector.utils.maintenance import ZenHubHeartbeatSender +from Products.ZenEvents.ZenEventClasses import Info +from Products.ZenHub.interfaces import ICollectorEventTransformer +from Products.ZenHub.PBDaemon import PBDaemon + +from .loader import ConfigLoader +from .loggers import DropLogger, MessageLogger, RawFormatter, HumanFormatter +from .processor import Parsers, SyslogProcessor +from .protocol import SyslogProtocol +from .receiver import AdoptPort, CreatePort, Receiver +from .transformer import FilterRules, SyslogMsgFilter + +_dropped_counter_names = ("eventFilterDroppedCount", "eventParserDroppedCount") +_drop_events_task_interval = 3600 + + +class SyslogDaemon(PBDaemon): + """ + Daemon for receiving SysLog events and recording them as Zenoss events. + """ + + mname = name = "zensyslog" + + _configservice = "Products.ZenHub.services.SyslogConfig" + initialServices = PBDaemon.initialServices + [_configservice] + + def __init__(self, *args, **kwargs): + super(SyslogDaemon, self).__init__(*args, **kwargs) + + self.configCycleInterval = 2 * 60 # seconds + self.cycleInterval = 5 * 60 # seconds + + self._rules = FilterRules(self) + self._event_filter = SyslogMsgFilter(self._rules, self.counters) + provideUtility(self._event_filter, ICollectorEventTransformer) + + self._heartbeat_sender = ZenHubHeartbeatSender( + self.options.monitor, + self.name, + self.options.heartbeatTimeout, + ) + self._heartbeat_task = None + + self._parsers = Parsers(self.sendEvent) + self._processor = SyslogProcessor( + self.sendEvent, + self.options.minpriority, + self.options.parsehost, + self.options.monitor, + self._parsers, + ) + self._loader = ConfigLoader( + self.getRemoteConfigServiceProxy, + self._parsers, + self._processor, + self._rules, + ) + self._loader_task = None + + self._drop_events_task = None + + self._receiver = None + + def buildOptions(self): + super(SyslogDaemon, self).buildOptions() + try: + SYSLOG_PORT = socket.getservbyname("syslog", "udp") + except socket.error: + SYSLOG_PORT = 514 + self.parser.add_option( + "--parsehost", + dest="parsehost", + action="store_true", + default=False, + help="Try to parse the hostname part of a syslog HEADER", + ) + self.parser.add_option( + "--stats", + dest="stats", + action="store_true", + default=False, + help="Print statistics to log every 2 secs", + ) + self.parser.add_option( + "--logorig", + dest="logorig", + action="store_true", + default=False, + help="Log the original message", + ) + self.parser.add_option( + "--logformat", + dest="logformat", + default="human", + help="Human-readable (/var/log/messages) or raw (wire)", + ) + self.parser.add_option( + "--minpriority", + dest="minpriority", + default=6, + type="int", + help="Minimum priority message that zensyslog will accept", + ) + self.parser.add_option( + "--syslogport", + dest="syslogport", + default=SYSLOG_PORT, + type="int", + help="Port number to use for syslog events", + ) + self.parser.add_option( + "--listenip", + dest="listenip", + default="0.0.0.0", # noqa: S104 + help="IP address to listen on. 
Default is %default", + ) + self.parser.add_option( + "--useFileDescriptor", + dest="useFileDescriptor", + type="int", + help="Read from an existing connection rather opening a new port.", + default=None, + ) + self.parser.add_option( + "--noreverseLookup", + dest="noreverseLookup", + action="store_true", + default=False, + help="Don't convert the remote device's IP address to a hostname.", + ) + + # @override + def run(self): + if ( + not self.options.useFileDescriptor + and self.options.syslogport < 1024 + ): + self.log.info( + "opening privileged port %s", self.options.syslogport + ) + # Makes a call to zensocket here, + # which performs an exec* so it never returns. + self.openPrivilegedPort( + "--listen", + "--proto=udp", + "--port=%s:%d" + % (self.options.listenip, self.options.syslogport), + ) + self.log.error("Failed to open privileged port") + sys.exit(1) + super(SyslogDaemon, self).run() + + # @override + @defer.inlineCallbacks + def connected(self): + try: + # initial config load + yield self._loader.task() + + self._start_heartbeat_task() + self._start_loader_task() + self._start_drop_events_task() + self._start_receiver() + except Exception: + self.log.exception("BOOM!") + + # @override + def postStatisticsImpl(self): + if self._receiver is None: + return + totalTime, totalEvents, maxTime = self._processor.stats.report() + self.rrdStats.counter("events", totalEvents) + + @defer.inlineCallbacks + def getRemoteConfigServiceProxy(self): + """Return the remote configuration service proxy.""" + proxy = yield self.getService(self._configservice) + defer.returnValue(proxy) + + def _start_heartbeat_task(self): + self._heartbeat_task = LoopingCall(self._heartbeat_sender.heartbeat) + self._heartbeat_task.start(self.cycleInterval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_heartbeat_task + ) + self.log.info("started task for sending heartbeats") + + def _stop_heartbeat_task(self): + if self._heartbeat_task is None: + return + self._heartbeat_task.stop() + self._heartbeat_task = None + self.log.info("stopped task for sending heartbeats") + + def _start_loader_task(self): + self._loader_task = LoopingCall(self._loader.task) + self._loader_task.start(self.cycleInterval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_loader_task + ) + self.log.info("started task to retrieve configuration data") + + def _stop_loader_task(self): + if self._loader_task is None: + return + self._loader_task.stop() + self._loader_task = None + self.log.info("stopped task to retrieve configuration data") + + def _start_drop_events_task(self): + self._drop_events_task = LoopingCall(self._send_drop_events) + self._drop_events_task.start(_drop_events_task_interval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_drop_events_task + ) + self.log.info( + "started task to send events with the count of dropped events" + ) + + def _stop_drop_events_task(self): + if self._drop_events_task is None: + return + self._drop_events_task.stop() + self._drop_events_task = None + self.log.info( + "stopped task to send events with the count of dropped events" + ) + + def _start_receiver(self): + protocol = self._build_protocol() + portfactory = self._build_port_factory() + self._receiver = Receiver(protocol, portfactory) + self._receiver.start() + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_receiver + ) + reactor.addSystemEventTrigger( + "after", "shutdown", self._displayStatistics + ) + self.log.info("started receiving syslog messages") + + def 
_stop_receiver(self): + if self._receiver is None: + return + self._receiver.stop() + self._receiver = None + self.log.info("stopped receiving syslog messages") + + def _build_protocol(self): + if self.options.logorig: + if self.options.logformat == "human": + formatter = HumanFormatter() + else: + formatter = RawFormatter() + logger = MessageLogger(formatter) + else: + logger = DropLogger() + + return SyslogProtocol( + self._processor, + logger, + self.counters, + self.options.noreverseLookup, + ) + + def _build_port_factory(self): + if self.options.useFileDescriptor is not None: + fd = int(self.options.useFileDescriptor) + return AdoptPort(fd) + return CreatePort(self.options.syslogport, self.options.listenip) + + def _send_drop_events(self): + for name in _dropped_counter_names: + count = self.counters[name] + event = { + "component": self.name, + "device": self.options.monitor, + "eventClass": "/App/Zenoss", + "eventKey": "zensyslog.{}".format(name), + "summary": "{}: {}".format(name, count), + "severity": Info, + } + self.sendEvent(event) + + def _displayStatistics(self): + totalTime, totalEvents, maxTime = self._processor.stats.report() + display = "%d events processed in %.2f seconds" % ( + totalEvents, + totalTime, + ) + if totalEvents > 0: + display += ( + "\n%.5f average seconds per event\n" + "Maximum processing time for one event was %.5f\n" + ) % ((totalTime / totalEvents), maxTime) + return display diff --git a/Products/ZenEvents/zensyslog/loader.py b/Products/ZenEvents/zensyslog/loader.py new file mode 100644 index 0000000000..58cf0153f7 --- /dev/null +++ b/Products/ZenEvents/zensyslog/loader.py @@ -0,0 +1,80 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2008, 2011, 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from twisted.internet import defer + +from .config import ConfigChecksums + +log = logging.getLogger("zen.zensyslog.configloader") + + +class ConfigLoader(object): + """Handles retrieving additional dynamic configs for daemon from ZODB""" + + def __init__(self, servicefactory, parsers, processor, rules): + self._servicefactory = servicefactory + self._parsers = parsers + self._processor = processor + self._rules = rules + self._checksums = ConfigChecksums() + + @defer.inlineCallbacks + def task(self): + """ + Contact zenhub and gather configuration data. 
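+
+        The checksums from the previous response are sent with the
+        request; any configuration section whose checksum comes back as
+        None is treated as unchanged and left alone.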
+ """ + log.debug("retrieving zensyslog configuration") + try: + service = yield self._servicefactory() + updates = yield service.callRemote("getConfig", self._checksums) + except Exception: + log.exception("failed to retrieve syslog configuration") + else: + log.debug("zensyslog configuration retrieved") + self._process_priorty(updates) + self._process_parsers(updates) + self._process_use_summary(updates) + self._process_rules(updates) + log.debug("applied zensyslog configuration changes") + + def _process_priorty(self, updates): + if updates.checksums.priority is None: + return + state = "updated" if self._checksums.priority else "initial" + log.info("received %s default event priority", state) + self._checksums.priority = updates.checksums.priority + self._processor.priority = updates.priority + + def _process_use_summary(self, updates): + if updates.checksums.use_summary is None: + return + state = "disable" if not updates.use_summary else "enable" + log.info("%s using syslog event summary as event message ", state) + self._checksums.use_summary = updates.checksums.use_summary + self._processor.use_summary = updates.use_summary + + def _process_parsers(self, updates): + if updates.checksums.parsers is None: + return + state = "updated" if self._checksums.parsers else "initial" + log.info("received %s syslog event parsers", state) + self._checksums.parsers = updates.checksums.parsers + self._parsers.update(updates.parsers) + + def _process_rules(self, updates): + if updates.checksums.rules is None: + return + state = "updated" if self._checksums.rules else "initial" + log.info("received %s event field filter rules", state) + self._checksums.rules = updates.checksums.rules + self._rules.update(updates.rules) diff --git a/Products/ZenEvents/zensyslog/loggers.py b/Products/ZenEvents/zensyslog/loggers.py new file mode 100644 index 0000000000..5124e5427e --- /dev/null +++ b/Products/ZenEvents/zensyslog/loggers.py @@ -0,0 +1,105 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging +import time + +from Products.ZenUtils.Utils import zenPath + +SYSLOG_DATE_FORMAT = "%b %d %H:%M:%S" +SAMPLE_DATE = "Apr 10 15:19:22" + + +class DropLogger(object): + """ + Messages are not written anywhere. + """ + + def log(self, message, address): + pass + + +class MessageLogger(object): + """ + Writes syslog messages to a log file. 
+ """ + + def __init__(self, formatter): + self._formatter = formatter + self._log = _get_logger() + + def log(self, data, address): + message = self._formatter(data, address) + self._log.info(message) + + +def _get_logger(self): + log = logging.getLogger("origsyslog") + log.setLevel(logging.INFO) + log.propagate = False + filepath = zenPath("log/origsyslog.log") + handler = logging.FileHandler(filepath) + handler.setFormatter(logging.Formatter("%(message)s")) + log.addHandler(handler) + return log + + +class RawFormatter(object): + def __call__(self, data, address): + return data + + +class HumanFormatter(object): + """ + Expands a syslog message into a string format suitable for writing + to the filesystem such that it appears the same as it would + had the message been logged by the syslog daemon. + + @param msg: syslog message + @type msg: string + @param client_address: IP info of the remote device (ipaddr, port) + @type client_address: tuple of (string, number) + @return: message + @rtype: string + """ + + def __call__(self, data, address): + # pri := (facility * 8) + severity + stop = data.find(">") + + # check for a datestamp. default to right now if date not present + start = stop + 1 + stop = start + len(SAMPLE_DATE) + dateField = data[start:stop] + try: + date = time.strptime(dateField, SYSLOG_DATE_FORMAT) + year = time.localtime()[0] + date = (year,) + date[1:] + start = stop + 1 + except ValueError: + # date not present, so use today's date + date = time.localtime() + + # check for a hostname. default to localhost if not present + stop = data.find(" ", start) + if data[stop - 1] == ":": + hostname = address[0] + else: + hostname = data[start:stop] + start = stop + 1 + + # the message content + body = data[start:] + + # assemble the message + prettyTime = time.strftime(SYSLOG_DATE_FORMAT, date) + message = "%s %s %s" % (prettyTime, hostname, body) + return message diff --git a/Products/ZenEvents/zensyslog/processor.py b/Products/ZenEvents/zensyslog/processor.py new file mode 100644 index 0000000000..0e885fa95d --- /dev/null +++ b/Products/ZenEvents/zensyslog/processor.py @@ -0,0 +1,347 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2007, 2023 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging +import re +import time + +from collections import Sequence + +import six + +from Products.ZenEvents.EventServer import Stats +from Products.ZenEvents.ZenEventClasses import Error +from Products.ZenUtils.IpUtil import isip + +from . import rfc3164 + +log = logging.getLogger("zen.zensyslog.processor") + + +class SyslogProcessor(object): + """ + Class to process syslog messages and convert them into events viewable + in the Zenoss event console. + """ + + def __init__( + self, + sendEvent, + minpriority, + parsehost, + monitor, + parsers, + ): + """ + Initialize a SyslogProcessor instance. 
+ + @param sendEvent: message from a remote host + @type sendEvent: string + @param minpriority: ignore anything under this priority + @type minpriority: integer + @param parsehost: hostname where this parser is running + @type parsehost: string + @param monitor: name of the distributed collector monitor + @type monitor: string + @param defaultPriority: priority to use if it can't be understood + from the received packet + @type defaultPriority: integer + @param syslogParsers: configureable syslog parsers + @type defaultPriority: list + """ + self.minpriority = minpriority + self.parsehost = parsehost + self.sendEvent = sendEvent + self.monitor = monitor + self.parsers = parsers + + # These are set as found on the EventManagerBase class. + self.use_summary = False + self._severity = rfc3164.Severity.Error + + self.stats = Stats() + + @property + def priority(self): + """Return the default syslog severity value.""" + return self._severity.value + + @priority.setter + def priority(self, value): + self._severity = rfc3164.Severity(value) + + def process(self, msg, ipaddr, host, rtime): + """ + Process an event from syslog and convert to a Zenoss event + + Returns either "EventSent" or "ParserDropped" + + @param msg: message from a remote host + @type msg: string + @param ipaddr: IP address of the remote host + @type ipaddr: string + @param host: remote host's name + @type host: string + @param rtime: time as reported by the remote host + @type rtime: string + """ + try: + fac, sev, dt, hostname, mesg = self._parse_message(msg) + except rfc3164.SyslogMessageError as ex: + log.error("bad syslog message: %s", ex) + return + + # Lower values mean higher severity/priority + if sev.value > self.minpriority: + log.debug("syslog severity below minimum value=%s", sev.value) + return + + event, drop = self._build_event(mesg, host, ipaddr, rtime, fac, sev) + if drop: + return drop + + self._maybe_add_originalTime(event, dt) + self._maybe_add_device(event, hostname) + self._maybe_use_summary_for_message(event, mesg) + self._maybe_overwrite_severity(event) + self._maybe_add_eventclasskey_value(event) + self._maybe_add_message(event, mesg) + + self._convert_to_unicode(event) + + self.sendEvent(event) + self.stats.add(time.time() - rtime) + return "EventSent" + + def _parse_message(self, message): + fac, sev, dt, hostname, mesg = rfc3164.parse(message) + + # Use default severity if a severity was not found in message + sev = sev if sev else self._severity + + return (fac, sev, dt, hostname, mesg) + + def _build_event(self, mesg, host, ipaddr, rtime, fac, sev): + fields, index, drop = parse_MSG(mesg, self.parsers) + if drop: + return (None, "ParserDropped") + + event = { + "device": host, + "monitor": self.monitor, + "ipAddress": ipaddr, + "firstTime": rtime, + "lastTime": rtime, + "eventGroup": "syslog", + "facility": fac.value if fac else None, + "priority": sev.value, + "severity": sev.as_event_severity(), + "parserRuleMatched": index, + } + event.update(fields) + return (event, None) + + def _maybe_add_originalTime(self, event, dt): + if dt: + event["originalTime"] = dt.strftime("%b %d %H:%M:%S") + + def _maybe_add_device(self, event, hostname): + if self.parsehost and hostname: + event["device"] = hostname + if isip(hostname): + event["ipAddress"] = hostname + else: + del event["ipAddress"] + + def _maybe_use_summary_for_message(self, event, mesg): + if self.use_summary: + event["message"] = event.get("summary", "") + event["unparsedMessage"] = mesg + + def _maybe_overwrite_severity(self, event): + if 
"overwriteSeverity" not in event: + return + overwrite_v = int(event["overwriteSeverity"]) + overwrite = rfc3164.Severity(overwrite_v) + old_severity = event["severity"] + new_severity = overwrite.as_event_severity() + log.debug( + "Severity overwritten in message tag. Previous:%s Current:%s", + old_severity, + new_severity, + ) + event["severity"] = new_severity + + def _maybe_add_eventclasskey_value(self, event): + value = getEventClassKeyValue(event) + if value: + event["eventClassKey"] = value + + def _maybe_add_message(self, event, mesg): + if "message" not in event: + event["message"] = mesg + + def _convert_to_unicode(self, event): + # Convert strings to unicode, previous code converted 'summary' & + # 'message' fields. With parsing group name matching, good idea to + # convert all fields. + event.update( + { + k: six.text_type(v) + for k, v in event.iteritems() + if isinstance(v, six.binary_type) + } + ) + + +def parse_MSG(msg, parsers): + """ + Parse the RFC-3164 tag of the syslog message using the regex defined + at the top of this module. + + @param msg: message from host + @type msg: string + @return: dictionary of event properties + @type: dictionary + """ + log.debug("[parsed_Tag] message=%s", msg) + fields = {} + for i, parser in enumerate(parsers): + log.debug("parser[%s] regex: %s", i, parser.pattern) + result = parser.parse(msg) + if result is None: + continue + if not parser.keep: + log.debug( + "parser[%s] matched but DROPPED due to parser. " + "msg:%r, pattern:%r, parsedGroups:%r", + i, + msg, + parser.pattern, + result, + ) + return None, -1, True + log.debug( + "parser[%s] matched. msg:%r, pattern:%r, parsedGroups:%r", + i, + msg, + parser.pattern, + result, + ) + return result, i, False + else: + log.debug("No matching parser: %r", msg) + fields["summary"] = msg + return fields, -1, False + + +def getEventClassKeyValue(evt): + """ + Build the key used to find an events dictionary record. If eventClass + is defined it is used. For NT events "Source_Evid" is used. For other + syslog events we use the summary of the event to perform a full text + or'ed search. 
+
+    @param evt: dictionary of event properties
+    @type evt: dictionary
+    @return: event class key value, or None if one is not needed
+    @rtype: string or None
+    """
+    if "eventClassKey" in evt or "eventClass" in evt:
+        return None
+
+    if "ntevid" in evt:
+        value = "{component}_{ntevid}".format(**evt)
+    elif "component" in evt:
+        value = evt["component"]
+    else:
+        value = None
+
+    if value:
+        try:
+            value = value.decode("latin-1")
+        except Exception:
+            value = value.decode("utf-8")
+
+    return value
+
+
+_parser_error_event = {
+    "device": "127.0.0.1",
+    "eventClass": "/App/Zenoss",
+    "severity": Error,
+    "eventClassKey": "",
+    "summary": "Syslog Parser processing issue",
+    "component": "zensyslog",
+}
+
+
+class _Parser(object):
+    __slots__ = ("_matcher", "keep")
+
+    def __init__(self, matcher, keep):
+        self._matcher = matcher
+        self.keep = keep
+
+    @property
+    def pattern(self):
+        return self._matcher.pattern
+
+    def parse(self, text):
+        m = self._matcher.search(text)
+        return m.groupdict() if m else None
+
+
+class Parsers(Sequence):
+    def __init__(self, sendevent):
+        self._sendevent = sendevent
+        self._parsers = []
+
+    def __getitem__(self, offset):
+        return self._parsers[offset]
+
+    def __len__(self):
+        return len(self._parsers)
+
+    def update(self, source):
+        parsers = []
+        for idx, spec in enumerate(source):
+            if "expr" not in spec:
+                msg = (
+                    'Parser configuration #{} is missing an "expr" attribute'
+                ).format(idx)
+                log.warn(msg)
+                self._send_error_event(message=msg)
+                continue
+            try:
+                matcher = re.compile(spec["expr"], re.DOTALL)
+                parser = _Parser(matcher, spec["keep"])
+            except Exception as ex:
+                msg = (
+                    "Parser configuration #{} could not compile expression "
+                    '"{!r}", {!r}'
+                ).format(idx, spec["expr"], ex)
+                log.warn(msg)
+                self._send_error_event(message=msg)
+            else:
+                parsers.append(parser)
+        self._parsers[:] = parsers
+
+    def _send_error_event(self, **kwargs):
+        """
+        Build an error event dict from the given parameters and send it.
+        """
+        if kwargs:
+            event = _parser_error_event.copy()
+            event.update(kwargs)
+        else:
+            event = _parser_error_event
+        self._sendevent(event)
diff --git a/Products/ZenEvents/zensyslog/protocol.py b/Products/ZenEvents/zensyslog/protocol.py
new file mode 100644
index 0000000000..3530973d04
--- /dev/null
+++ b/Products/ZenEvents/zensyslog/protocol.py
@@ -0,0 +1,76 @@
+##############################################################################
+#
+# Copyright (C) Zenoss, Inc. 2024, all rights reserved.
+#
+# This content is made available according to terms specified in
+# License.zenoss under the directory where your Zenoss product is installed.
+#
+##############################################################################
+
+from __future__ import absolute_import, print_function
+
+import logging
+import time
+
+from twisted.internet import defer
+from twisted.internet.protocol import DatagramProtocol
+
+from Products.ZenUtils.IpUtil import asyncNameLookup
+
+log = logging.getLogger("zen.zensyslog.protocol")
+
+
+class SyslogProtocol(DatagramProtocol):
+    """
+    Implementation to listen for syslog messages.
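+
+    A minimal sketch of how this protocol gets bound to a UDP port
+    (assuming a configured processor and counters; Receiver and
+    CreatePort do the real wiring):
+
+        protocol = SyslogProtocol(
+            processor, DropLogger(), counters, noreverselookup=True
+        )
+        reactor.listenUDP(514, protocol, interface="0.0.0.0")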
+ """ + + def __init__(self, processor, messagelogger, counters, noreverselookup): + self._processor = processor + self._messagelogger = messagelogger + self._counters = counters + self._gethostname = ( + defer.succeed if noreverselookup else asyncNameLookup + ) + + def datagramReceived(self, packet, address): + """ + Consume the network packet + + @param data: syslog message + @type data: string + @param address: IP info of the remote device (ipaddr, port) + @type address: tuple of (string, number) + """ + if packet == "": + log.debug("received empty datagram. Discarding.") + return + log.debug("received packet from %s -> %s", address, packet) + self._messagelogger.log(packet, address) + + (ipaddr, port) = address + d = self._gethostname(ipaddr) + data = (packet, ipaddr, time.time()) + d.addCallback(self._handle_message, data) + d.addErrback(self._convert_error, data) + + def doStop(self): + log.info("stop receiving syslog messages") + + def _convert_error(self, error, data): + # On failure, use the ip address as the hostname. + self._handle_message(data[1], data) + + def _handle_message(self, hostname, data): + """ + Send the resolved address, if possible, and the event via the thread + + @param response: Twisted response + @type response: Twisted response + @param data: (msg, ipaddr, rtime) + @type data: tuple of (string, string, datetime object) + """ + (packet, ipaddr, rtime) = data + result = self._processor.process(packet, ipaddr, hostname, rtime) + if result == "ParserDropped": + self._counters["eventParserDroppedCount"] += 1 diff --git a/Products/ZenEvents/zensyslog/receiver.py b/Products/ZenEvents/zensyslog/receiver.py new file mode 100644 index 0000000000..100d0cda16 --- /dev/null +++ b/Products/ZenEvents/zensyslog/receiver.py @@ -0,0 +1,75 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2008, 2011, 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging +import os +import socket + +from twisted.internet import reactor + +log = logging.getLogger("zen.zensyslog.receiver") + + +class Receiver(object): + """ + Listens for syslog messages and turns them into Zenoss events. + """ + + def __init__(self, protocol, portfactory): + self._protocol = protocol + self._portfactory = portfactory + self._port = None + + def start(self): + self._port = self._portfactory(self._protocol) + + def stop(self): + if self._port is None: + return + self._port.stopListening() + self._port = None + + +class CreatePort(object): + def __init__(self, port, interface): + self._port = port + self._interface = interface + + def __call__(self, protocol): + return reactor.listenUDP( + self._port, protocol, interface=self._interface + ) + + +class AdoptPort(object): + def __init__(self, fd): + self._fd = fd + + def __call__(self, protocol): + # Create a datagram socket from the specific file descriptor + sock = socket.fromfd(self._fd, socket.AF_INET, socket.SOCK_DGRAM) + + # No longer need the file descriptor; `fromfd` created a duplicate. + os.close(self._fd) + del self._fd + + # Set the socket non-blocking + sock.setblocking(False) + + try: + # Adopt the socket and keep a reference to the IListeningPort. 
+ return reactor.adoptDatagramPort( + sock.fileno(), socket.AF_INET, protocol + ) + finally: + # No longer need the socket; + # `adoptDatagramPort` created a duplicate. + sock.close() diff --git a/Products/ZenEvents/zensyslog/rfc3164/__init__.py b/Products/ZenEvents/zensyslog/rfc3164/__init__.py new file mode 100644 index 0000000000..83363a2019 --- /dev/null +++ b/Products/ZenEvents/zensyslog/rfc3164/__init__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from .parser import parse, SyslogMessageError +from .severity import Severity + +__all__ = ("parse", "Severity", "SyslogMessageError") diff --git a/Products/ZenEvents/zensyslog/rfc3164/facility.py b/Products/ZenEvents/zensyslog/rfc3164/facility.py new file mode 100644 index 0000000000..5a9217c0ec --- /dev/null +++ b/Products/ZenEvents/zensyslog/rfc3164/facility.py @@ -0,0 +1,71 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from enum import IntEnum + + +class Facility(IntEnum): + kernel = 0 + user = 1 + mail = 2 + system = 3 + security4 = 4 + syslogd = 5 + printer = 6 + network_news = 7 + uucp = 8 + clock9 = 9 + security10 = 10 + ftp = 11 + ntp = 12 + log_audit = 13 + log_alert = 14 + clock15 = 15 + local0 = 16 + local1 = 17 + local2 = 18 + local3 = 19 + local4 = 20 + local5 = 21 + local6 = 22 + local7 = 23 + + @property + def description(self): + return _descriptions.get(self.value) + + +_descriptions = { + 0: "kernel messages", + 1: "user-level messages", + 2: "mail system", + 3: "system daemons", + 4: "security/authorization messages", + 5: "messages generated internally by syslogd", + 6: "line printer subsystem", + 7: "network news subsystem", + 8: "UUCP subsystem", + 9: "clock daemon", + 10: "security/authorization messages", + 11: "FTP daemon", + 12: "NTP subsystem", + 13: "log audit", + 14: "log alert", + 15: "clock daemon", + 16: "local use 0 (local0)", + 17: "local use 1 (local1)", + 18: "local use 2 (local2)", + 19: "local use 3 (local3)", + 20: "local use 4 (local4)", + 21: "local use 5 (local5)", + 22: "local use 6 (local6)", + 23: "local use 7 (local7)", +} diff --git a/Products/ZenEvents/zensyslog/rfc3164/parser.py b/Products/ZenEvents/zensyslog/rfc3164/parser.py new file mode 100644 index 0000000000..6fe55fea84 --- /dev/null +++ b/Products/ZenEvents/zensyslog/rfc3164/parser.py @@ -0,0 +1,121 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +import logging +import re + +import dateutil.parser + +from .facility import Facility +from .severity import Severity + +log = logging.getLogger("zen.zensyslog.parser") + + +class SyslogMessageError(ValueError): + """Raised when the syslog message has bad values""" + + +def parse(message): + """ + Return a parsed syslog (RFC 3164) message. + + Return a tuple having four elements: + + (facility, severity, datetime, hostname, message) + + The 'message' is the remaining content of the original message + minus the 'facility', 'severity', 'datetime', and 'hostname' parts. + """ + start = 0 + start, facility, severity = _extract_pri(start, message) + start, dt = _extract_timestamp(start, message) + start, hostname = _extract_hostname(start, message) + return (facility, severity, dt, hostname, message[start:].strip()) + + +def _extract_pri(start, mesg): + """ + Parse RFC-3164 PRI part of syslog message to get facility and priority. + + Returns a tuple containing a dict of the parsed fields and unparsed + portion of the syslog message string. + + @param msg: message from host + @type msg: string + @return: tuple of dictionary of event properties and the message + @type: (dictionary, string) + """ + if mesg[start:1] == "<": + posn = mesg.find(">") + pvalue = mesg[start + 1 : posn] + try: + pvalue = int(pvalue) + except ValueError: + raise SyslogMessageError( + "Found '{}' instead of a number for priority".format(pvalue) + ) + fac, sev = divmod(pvalue, 8) + try: + facility = Facility(fac) + except ValueError: + raise SyslogMessageError("Invalid facility value '{}'".format(fac)) + try: + severity = Severity(sev) + except ValueError: + raise SyslogMessageError("Invalid severity value '{}'".format(sev)) + return (posn + 1, facility, severity) + + if mesg and mesg[start] < " ": + sev = ord(mesg[start]) + try: + severity = Severity(sev) + except ValueError: + raise SyslogMessageError("Invalid severity value '{}'".format(sev)) + return (start + 1, Facility.kernel, severity) + + log.debug("no priority found in message") + return (start, None, None) + + +_match_timestamp = re.compile( + "^(\S{3} [\d ]{2} [\d ]{2}:[\d ]{2}:[\d ]{2}(?:\.\d{1,3})?)", re.DOTALL +).search + + +def _extract_timestamp(start, mesg): + m = _match_timestamp(mesg[start:]) + if not m: + log.debug("no timestamp found in message") + return (start, None) + ts = m.group(0) + try: + dt = dateutil.parser.parse(ts) + except ValueError: + raise SyslogMessageError("Invalid timestamp '{}'".format(ts)) + else: + return (start + len(ts) + 1, dt) + + +_not_hostname = re.compile(r"[\[:]").search + + +def _extract_hostname(start, mesg): + offset = mesg[start:].find(" ") + if offset < 0: + log.debug("unexpected end of message") + return start, None + hostname = mesg[start : start + offset] + if _not_hostname(hostname): + log.debug("no hostname found in message") + return start, None + hostname = hostname.split("@", 1)[-1] + return (start + offset), hostname diff --git a/Products/ZenEvents/zensyslog/rfc3164/severity.py b/Products/ZenEvents/zensyslog/rfc3164/severity.py new file mode 100644 index 0000000000..8ea453f17e --- /dev/null +++ b/Products/ZenEvents/zensyslog/rfc3164/severity.py @@ -0,0 +1,55 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024 all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from enum import IntEnum + +from Products.ZenEvents import ZenEventClasses as _zec + + +class Severity(IntEnum): + Emergency = 0 + Alert = 1 + Critical = 2 + Error = 3 + Warning = 4 + Notice = 5 + Informational = 6 + Debug = 7 + + @property + def description(self): + return _descriptions.get(self.value) + + def as_event_severity(self): + return _syslog_to_zenoss.get(self.value) + + +_descriptions = { + 0: "system is unusable", + 1: "action must be taken immediately", + 2: "critical conditions", + 3: "error conditions", + 4: "warning conditions", + 5: "normal but significant condition", + 6: "informational messages", + 7: "debug-level messages", +} + +_syslog_to_zenoss = { + 0: _zec.Critical, + 1: _zec.Critical, + 2: _zec.Critical, + 3: _zec.Error, + 4: _zec.Warning, + 5: _zec.Info, + 6: _zec.Info, + 7: _zec.Debug, +} diff --git a/Products/ZenEvents/zensyslog/tests/__init__.py b/Products/ZenEvents/zensyslog/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenEvents/zensyslog/tests/test_processor.py b/Products/ZenEvents/zensyslog/tests/test_processor.py new file mode 100644 index 0000000000..d3d63740d3 --- /dev/null +++ b/Products/ZenEvents/zensyslog/tests/test_processor.py @@ -0,0 +1,208 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2008, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from unittest import TestCase + +import six + +from Products.ZenEvents.zensyslog.processor import ( + getEventClassKeyValue, + Parsers, + parse_MSG, +) +from Products.ZenEvents.EventManagerBase import EventManagerBase + + +class TestGetEventClassKeyValue(TestCase): + base = {"device": "localhost", "component": "component", "severity": 3} + + def setUp(t): + logging.getLogger().setLevel(logging.CRITICAL + 10) + + def tearDown(t): + logging.getLogger().setLevel(logging.NOTSET) + + def test_empty(t): + empty = {} + result = getEventClassKeyValue(empty.copy()) + t.assertIsNone(result) + + def test_eventClassKey(t): + evt = dict(eventClassKey="akey", **t.base) + result = getEventClassKeyValue(evt.copy()) + t.assertIsNone(result) + + def test_eventClassKey_and_ntevid(t): + evt = dict(eventClassKey="akey", ntevid="1234", **t.base) + result = getEventClassKeyValue(evt.copy()) + t.assertIsNone(result) + + def test_ntevid(t): + evt = dict(ntevid="1234", **t.base) + result = getEventClassKeyValue(evt.copy()) + t.assertEqual(result, "component_1234") + + def test_default(t): + evt = dict(**t.base) + result = getEventClassKeyValue(evt.copy()) + t.assertEqual(result, "component") + + +class TestParseMSG(TestCase): + def setUp(t): + logging.getLogger().setLevel(logging.CRITICAL + 10) + t.parsers = Parsers(t.sendEvent) + t.parsers.update(EventManagerBase.syslogParsers) + + def tearDown(t): + del t.parsers + logging.getLogger().setLevel(logging.NOTSET) + + def sendEvent(t, evt): + "Fakeout sendEvent() method" + t.sent = evt + + def test_msg_content(t): + long_text_message = ("long text message " * 20).strip() + msg = ( + "2016-08-08T11:07:33.660820-04:00 devname=localhost " + "log_id=98765434 type=component {}" + ).format(long_text_message) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertEqual(index, -1) + t.assertDictEqual(fields, {"summary": six.text_type(msg)}) + + def testCheckFortigate(t): + """ + Test of Fortigate syslog message parsing + """ + key = "987654321" + comp = "myComponent" + msg = ( + "date=xxxx devname=blue log_id={} type={} " "blah blah blah" + ).format(key, comp) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("eventClassKey"), key) + t.assertEqual(fields.get("component"), comp) + t.assertEqual( + fields.get("summary"), + "devname=blue log_id=987654321 type=myComponent blah blah blah", + ) + + def testCheckCiscoPortStatus(t): + """ + Test of Cisco port status syslog message parsing + """ + msg = ( + "Process 10532, Nbr 192.168.10.13 on GigabitEthernet2/15 " + "from LOADING to FULL, Loading Done" + ) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("process_id"), "10532") + t.assertEqual(fields.get("interface"), "GigabitEthernet2/15") + t.assertEqual(fields.get("start_state"), "LOADING") + t.assertEqual(fields.get("end_state"), "FULL") + t.assertEqual(fields.get("summary"), "Loading Done") + + def testCiscoVpnConcentrator(t): + """ + Test of Cisco VPN Concentrator syslog message parsing + """ + msg = ( + "54884 05/25/2009 13:41:14.060 SEV=3 HTTP/42 RPT=4623 " + "Error on socket accept." 
+ ) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("eventClassKey"), "HTTP/42") + t.assertEqual(fields.get("summary"), "Error on socket accept.") + + def testCiscoStandardMessageSeverity(t): + """ + Test that the event severity is correctly extracted from the + Cisco standard message body + """ + msg = ( + "2014 Jan 31 19:45:51 R2-N6K1-2010-P1 " + "%ETH_PORT_CHANNEL-5-CREATED: port-channel1 created" + ) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("overwriteSeverity"), "5") + + def testDellSyslog(t): + """ + Test dell stuf + """ + msg = ( + "1-Oct-2009 23:00:00.383809:snapshotDelete.cc:290:INFO:8.2.5:" + "Successfully deleted snapshot " + "'UNVSQLCLUSTERTEMPDB-2009-09-30-23:00:14.11563'." + ) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("eventClassKey"), "8.2.5") + t.assertEqual( + fields.get("summary"), + "Successfully deleted snapshot " + "'UNVSQLCLUSTERTEMPDB-2009-09-30-23:00:14.11563'.", + ) + + def testDellSyslog2(t): + """ + Test dell stuf + """ + msg = ( + "2626:48:VolExec:27-Aug-2009 " + "13:15:58.072049:VE_VolSetWorker.hh:75:WARNING:43.3.2:Volume " + "volumeName has reached 96 percent of its reported size and " + "is currently using 492690MB." + ) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("eventClassKey"), "43.3.2") + t.assertEqual( + fields.get("summary"), + "Volume volumeName has reached 96 percent of its reported size " + "and is currently using 492690MB.", + ) + + def testNetAppSyslogParser(t): + """ + Test NetApp syslog parser. + """ + msg = ( + "[deviceName: 10/100/1000/e1a:warning]: Client 10.0.0.101 " + "(xid 4251521131) is trying to access an unexported mount " + "(fileid 64, snapid 0, generation 6111516 and flags 0x0 on " + "volume 0xc97d89a [No volume name available])" + ) + fields, index, drop = parse_MSG(msg, t.parsers) + t.assertFalse(drop) + t.assertTrue(index >= 0) + t.assertEqual(fields.get("component"), "10/100/1000/e1a") + t.assertEqual( + fields.get("summary"), + "Client 10.0.0.101 (xid 4251521131) is trying to access an " + "unexported mount (fileid 64, snapid 0, generation 6111516 " + "and flags 0x0 on volume 0xc97d89a [No volume name available])", + ) diff --git a/Products/ZenEvents/zensyslog/tests/test_transformer.py b/Products/ZenEvents/zensyslog/tests/test_transformer.py new file mode 100644 index 0000000000..81b925e31d --- /dev/null +++ b/Products/ZenEvents/zensyslog/tests/test_transformer.py @@ -0,0 +1,78 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +import logging +import collections + +from unittest import TestCase +from mock import Mock + +from Products.ZenHub.interfaces import TRANSFORM_CONTINUE, TRANSFORM_DROP + +from Products.ZenEvents.EventManagerBase import EventManagerBase +from Products.ZenEvents.zensyslog.transformer import ( + FilterRules, + SyslogMsgFilter, +) + + +class SyslogMsgFilterTest(TestCase): + def setUp(t): + logging.getLogger().setLevel(logging.CRITICAL + 10) + + def tearDown(t): + logging.getLogger().setLevel(logging.NOTSET) + + def testDefaultFilterRules(self): + app = Mock() + rules = FilterRules(app) + rules.update(EventManagerBase.syslogMsgEvtFieldFilterRules) + self.assertEquals(app.sendEvent.called, False) + + def testBadFilter(self): + filterCfg = {"eventClassKey": ["(BadBad"]} + app = Mock() + rules = FilterRules(app) + rules.update(filterCfg) + self.assertEqual(len(rules), 0) + self.assertTrue(app.sendEvent.called) + self.assertEquals(app.sendEvent.call_count, 1) + evtFields = app.sendEvent.mock_calls[0][1][0] + self.assertEquals( + evtFields["message"], + "Syslog Message Filter configuration for the 'eventClassKey' " + "event field could not compile rule #0 with the expression " + "of '(BadBad'. Error error('unbalanced parenthesis',)", + ) + + def testSyslogMsgFilterMatch(self): + filterCfg = {"eventClassKey": ["MARK"]} + event = { + "severity": 4, + "eventClassKey": "MARK", + "component": "zensyslog", + "summary": "test message", + "eventKey": "SyslogMessageFilter.eventClassKey.0", + "device": "127.0.0.1", + "eventClass": "/App/Zenoss", + "message": "test test 123", + } + app = Mock() + rules = FilterRules(app) + counters = collections.Counter() + counters["eventCount"] = 0 + counters["eventFilterDroppedCount"] = 0 + transformer = SyslogMsgFilter(rules, counters) + rules.update(filterCfg) + self.assertFalse(app.sendEvent.called) + result = transformer.transform(event) + self.assertEquals(result, TRANSFORM_DROP) + event["eventClassKey"] = "NotMark" + result = transformer.transform(event) + self.assertEquals(result, TRANSFORM_CONTINUE) diff --git a/Products/ZenEvents/zensyslog/transformer.py b/Products/ZenEvents/zensyslog/transformer.py new file mode 100644 index 0000000000..35097eb876 --- /dev/null +++ b/Products/ZenEvents/zensyslog/transformer.py @@ -0,0 +1,146 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging +import re + +from collections import Mapping + +from zope.interface import implementer + +from Products.ZenEvents.ZenEventClasses import Error +from Products.ZenHub.interfaces import ( + ICollectorEventTransformer, + TRANSFORM_CONTINUE, + TRANSFORM_DROP, +) + +log = logging.getLogger("zen.zensyslog.transformer") + +_rule_error_event = { + "device": "127.0.0.1", + "eventClass": "/App/Zenoss", + "severity": Error, + "eventClassKey": "", + "summary": "Syslog Message Filter processing issue", + "component": "zensyslog", +} + + +@implementer(ICollectorEventTransformer) +class SyslogMsgFilter(object): + """ + Interface used to perform filtering of events at the collector. 
+ This could be used to drop events, transform event content, etc. + + These transformers are run sequentially before a fingerprint is generated + for the event, so they can set fields which are used by an + ICollectorEventFingerprintGenerator. + + The priority of the event transformer (the transformers are executed in + ascending order using the weight of each filter). + """ + + weight = 1 + + def __init__(self, rules, counters): + self._rules = rules + self._counters = counters + + def transform(self, event): + """ + Performs any transforms of the specified event at the collector. + + @param event: The event to transform. + @type event: dict + @return: Returns TRANSFORM_CONTINUE if this event should be forwarded + on to the next transformer in the sequence, TRANSFORM_STOP if no + further transformers should be performed on this event, and + TRANSFORM_DROP if the event should be dropped. + @rtype: int + """ + relevant_rules = ( + (k, v) for k, v in self._rules.iteritems() if k in event + ) + for name, matchers in relevant_rules: + value = event.get(name) + for idx, matcher in enumerate(matchers): + matched = matcher.search(value) + if not matched: + continue + log.debug( + "drop syslog message! " + "EventFieldName:%r " + "EventFieldValue:%r " + "FilterRuleNumber:%s " + "FilterRuleExpression:%r", + name, + value, + idx, + matcher.pattern, + ) + self._counters["eventFilterDroppedCount"] += 1 + return TRANSFORM_DROP + else: + return TRANSFORM_CONTINUE + + +class FilterRules(Mapping): + """ + Rules for syslog message filtering. + """ + + def __init__(self, app): + self._app = app + self._rules = {} + + def __getitem__(self, key): + return self._rules[key] + + def __iter__(self): + return iter(self._rules) + + def __len__(self): + return len(self._rules) + + def update(self, source): + rules = {} + for name, ruledefs in source.iteritems(): + for idx, ruledef in enumerate(ruledefs): + try: + compiledRule = re.compile(ruledef, re.DOTALL) + except Exception as ex: + msg = ( + "Syslog Message Filter configuration for the " + "{!r} event field could not compile rule #{!r}" + " with the expression of {!r}. Error {!r}".format( + name, idx, ruledef, ex + ) + ) + log.warn(msg) + self._send_error_event( + message=msg, + eventKey="SyslogMessageFilter.{}.{}".format(name, idx), + ) + else: + rules.setdefault(name, []).append(compiledRule) + self._rules = rules + + def _send_error_event(self, **kwargs): + """ + Build an Event dict from parameters.n + """ + if kwargs: + event = _rule_error_event.copy() + event.update(kwargs) + else: + event = _rule_error_event + self._app.sendEvent(event) diff --git a/Products/ZenEvents/zentrap.py b/Products/ZenEvents/zentrap.py deleted file mode 100644 index 3eebca92f6..0000000000 --- a/Products/ZenEvents/zentrap.py +++ /dev/null @@ -1,1042 +0,0 @@ -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2007, 2011-2012, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -"""zentrap - -Creates events from SNMP Traps. -Currently a wrapper around the Net-SNMP C library. 
-""" - -import base64 -import ctypes as c # Magical interfacing with C code -import errno -import logging -import socket -import sys -import time - -from collections import defaultdict -from ipaddr import IPAddress -from struct import unpack - -from pynetsnmp import netsnmp, twistedsnmp -from twisted.internet import defer, reactor -from twisted.python.failure import Failure -from zope.component import queryUtility, getUtility, provideUtility -from zope.interface import implementer - -from zenoss.protocols.protobufs.zep_pb2 import SEVERITY_WARNING - - -from Products.ZenCollector.daemon import CollectorDaemon -from Products.ZenCollector.interfaces import ( - ICollector, ICollectorPreferences, IEventService, - IScheduledTask, IStatisticsService -) -from Products.ZenCollector.services.config import DeviceProxy -from Products.ZenCollector.tasks import ( - SimpleTaskFactory, SimpleTaskSplitter, BaseTask, TaskStates -) -from Products.ZenEvents.EventServer import Stats -from Products.ZenEvents.TrapFilter import TrapFilter, TrapFilterError -from Products.ZenEvents.ZenEventClasses import Clear, Critical -from Products.ZenHub.interfaces import ICollectorEventTransformer -from Products.ZenHub.services.SnmpTrapConfig import User -from Products.ZenUtils.captureReplay import CaptureReplay -from Products.ZenUtils.observable import ObservableMixin -from Products.ZenUtils.Utils import unused - -unused(DeviceProxy, User) - -log = logging.getLogger("zen.zentrap") - -# This is what struct sockaddr_in {} looks like -family = [('family', c.c_ushort)] -if sys.platform == 'darwin': - family = [('len', c.c_ubyte), ('family', c.c_ubyte)] - - -class sockaddr_in(c.Structure): - _fields_ = family + [ - ('port', c.c_ubyte * 2), # need to decode from net-byte-order - ('addr', c.c_ubyte * 4), - ] - - -class sockaddr_in6(c.Structure): - _fields_ = family + [ - ('port', c.c_ushort), # need to decode from net-byte-order - ('flow', c.c_ubyte * 4), - ('addr', c.c_ubyte * 16), - ('scope_id', c.c_ubyte * 4), - ] - - -_pre_parse_factory = c.CFUNCTYPE( - c.c_int, - c.POINTER(netsnmp.netsnmp_session), - c.POINTER(netsnmp.netsnmp_transport), - c.c_void_p, - c.c_int -) - -# teach python that the return type of snmp_clone_pdu is a pdu pointer -netsnmp.lib.snmp_clone_pdu.restype = netsnmp.netsnmp_pdu_p - -# Version codes from the PDU -SNMPv1 = 0 -SNMPv2 = 1 -SNMPv3 = 3 - -LEGACY_VARBIND_COPY_MODE = 0 -DIRECT_VARBIND_COPY_MODE = 1 -MIXED_VARBIND_COPY_MODE = 2 - -class FakePacket(object): - """ - A fake object to make packet replaying feasible. - """ - def __init__(self): - self.fake = True - - -@implementer(ICollectorPreferences) -class SnmpTrapPreferences(CaptureReplay): - - def __init__(self): - """ - Initializes a SnmpTrapPreferences instance and provides - default values for needed attributes. 
- """ - self.collectorName = 'zentrap' - self.configCycleInterval = 20 # minutes - self.cycleInterval = 5 * 60 # seconds - - # The configurationService attribute is the fully qualified class-name - # of our configuration service that runs within ZenHub - self.configurationService = 'Products.ZenHub.services.SnmpTrapConfig' - - # Will be filled in based on buildOptions - self.options = None - - self.configCycleInterval = 20 * 60 - self.task = None - - def postStartupTasks(self): - self.task = TrapTask('zentrap', configId='zentrap') - yield self.task - - def buildOptions(self, parser): - """ - Command-line options to be supported - """ - TRAP_PORT = 162 - try: - TRAP_PORT = socket.getservbyname('snmptrap', 'udp') - except socket.error: - pass - parser.add_option( - '--trapport', '-t', - dest='trapport', type='int', default=TRAP_PORT, - help="Listen for SNMP traps on this port rather than the default" - ) - parser.add_option( - '--useFileDescriptor', - dest='useFileDescriptor', type='int', default=None, - help="Read from an existing connection " - "rather than opening a new port." - ) - parser.add_option( - '--trapFilterFile', - dest='trapFilterFile', type='string', default=None, - help="File that contains trap oids to keep, " - "should be in $ZENHOME/etc." - ) - parser.add_option( - '--varbindCopyMode', - dest='varbindCopyMode', type='int', default=2, - help='Varbind copy mode. Possible values: ' - '0 - the varbinds are copied into event as one field and ifIndex field is added. ' - '1 - the varbinds are copied into event as several fields and sequence field is added. ' - '2 - the mixed mode. Uses varbindCopyMode=0 behaviour if there is only one occurrence ' - 'of the varbind, otherwise uses varbindCopyMode=1 behaviour' - ) - - self.buildCaptureReplayOptions(parser) - - def postStartup(self): - # Ensure that we always have an oidMap - daemon = getUtility(ICollector) - daemon.oidMap = {} - # add our collector's custom statistics - statService = queryUtility(IStatisticsService) - statService.addStatistic("events", "COUNTER") - - -def ipv6_is_enabled(): - """test if ipv6 is enabled - """ - # hack for ZEN-12088 - TODO: remove next line - return False - try: - socket.socket(socket.AF_INET6, socket.SOCK_DGRAM, 0) - except socket.error as e: - if e.errno == errno.EAFNOSUPPORT: - return False - raise - return True - - -class _LegacyVarbindProcessor(object): - - def __init__(self, oid2name): - self.oid2name = oid2name - - def __call__(self, varbinds): - result = defaultdict(list) - for oid, value in varbinds: - base_name = self.oid2name(oid, exactMatch=False, strip=True) - full_name = self.oid2name(oid, exactMatch=False, strip=False) - result[base_name].append(str(value)) - if base_name != full_name: - suffix = full_name[len(base_name) + 1:] - result[base_name + ".ifIndex"].append(suffix) - return {name: ','.join(vals) for name, vals in result.iteritems()} - - -class _DirectVarbindProcessor(object): - - def __init__(self, oid2name): - self.oid2name = oid2name - - def __call__(self, varbinds): - result = defaultdict(list) - for oid, value in varbinds: - base_name = self.oid2name(oid, exactMatch=False, strip=True) - full_name = self.oid2name(oid, exactMatch=False, strip=False) - result[full_name].append(str(value)) - if base_name != full_name: - suffix = full_name[len(base_name) + 1:] - result[base_name + ".sequence"].append(suffix) - return {name: ','.join(vals) for name, vals in result.iteritems()} - - -class _MixedVarbindProcessor(object): - - def __init__(self, oid2name): - self.oid2name = oid2name 
- - def __call__(self, varbinds): - result = defaultdict(list) - groups = defaultdict(list) - - # Group varbinds having the same MIB Object name together - for key, value in varbinds: - base_name = self.oid2name(key, exactMatch=False, strip=True) - full_name = self.oid2name(key, exactMatch=False, strip=False) - groups[base_name].append((full_name, str(value))) - - # Process each MIB object by name - for base_name, data in groups.items(): - offset = len(base_name) + 1 - - # If there's only one instance for a given object, then add - # the varbind to the event details using pre Zenoss 6.2.0 rules. - if len(data) == 1: - full_name, value = data[0] - result[base_name].append(value) - - suffix = full_name[offset:] - if suffix: - result[base_name + ".ifIndex"].append(suffix) - continue - - # Record the varbind instance(s) in their 'raw' form. - for full_name, value in data: - suffix = full_name[offset:] - result[full_name].append(value) - if suffix: - result[base_name + ".sequence"].append(suffix) - return {name: ','.join(vals) for name, vals in result.iteritems()} - - -@implementer(IScheduledTask) -class TrapTask(BaseTask, CaptureReplay): - """ - Listen for SNMP traps and turn them into events - Connects to the TrapService service in zenhub. - """ - _varbind_processors = { - LEGACY_VARBIND_COPY_MODE: _LegacyVarbindProcessor, - DIRECT_VARBIND_COPY_MODE: _DirectVarbindProcessor, - MIXED_VARBIND_COPY_MODE: _MixedVarbindProcessor, - } - - def __init__( - self, taskName, configId, scheduleIntervalSeconds=3600, - taskConfig=None): - BaseTask.__init__( - self, taskName, configId, scheduleIntervalSeconds, taskConfig - ) - self.log = log - - # Needed for interface - self.name = taskName - self.configId = configId - self.state = TaskStates.STATE_IDLE - self.interval = scheduleIntervalSeconds - self._daemon = getUtility(ICollector) - self._eventService = queryUtility(IEventService) - self._preferences = self._daemon - self._statService = queryUtility(IStatisticsService) - # For compatibility with captureReplay - self.options = self._daemon.options - self.oidMap = self._daemon.oidMap - self.stats = Stats() - - # Command-line argument sanity checking - self.processCaptureReplayOptions() - self.session = None - self._replayStarted = False - self.varbindCopyMode = self.options.varbindCopyMode - - if self.varbindCopyMode not in [LEGACY_VARBIND_COPY_MODE, - DIRECT_VARBIND_COPY_MODE, - MIXED_VARBIND_COPY_MODE]: - self.varbindCopyMode = MIXED_VARBIND_COPY_MODE - self.log.warn( - "Wrong 'varbindCopyMode' value. 'varbindCopyMode=%s' will be used", - self.varbindCopyMode - ) - - processor_class = self._varbind_processors.get(self.varbindCopyMode) - self._process_varbinds = processor_class(self.oid2name) - - if not self.options.replayFilePrefix: - trapPort = self._preferences.options.trapport - if not self.options.useFileDescriptor and trapPort < 1024: - listen_ip = "ipv6" if ipv6_is_enabled() else "0.0.0.0" - # Makes call to zensocket here - # does an exec* so it never returns - self._daemon.openPrivilegedPort( - '--listen', - '--proto=udp', - '--port=%s:%d' % (listen_ip, trapPort) - ) - self.log("Unexpected return from openPrivilegedPort. 
Exiting.") - sys.exit(1) - - # Start listening for SNMP traps - self.log.info("Starting to listen on SNMP trap port %s", trapPort) - self.session = netsnmp.Session() - listening_protocol = "udp6" if ipv6_is_enabled() else "udp" - if self._preferences.options.useFileDescriptor is not None: - # open port 1162, but then dup fileno onto it - listening_address = listening_protocol + ':1162' - fileno = int(self._preferences.options.useFileDescriptor) - else: - listening_address = '%s:%d' % (listening_protocol, trapPort) - fileno = -1 - self._pre_parse_callback = _pre_parse_factory(self._pre_parse) - self.session.awaitTraps( - listening_address, fileno, self._pre_parse_callback, debug=True - ) - self.session.callback = self.receiveTrap - twistedsnmp.updateReactor() - - def doTask(self): - """ - This is a wait-around task since we really are called - asynchronously. - """ - if self.options.replayFilePrefix and not self._replayStarted: - log.debug("Replay starting...") - self._replayStarted = True - self.replayAll() - log.debug("Replay done...") - return - return defer.succeed("Waiting for SNMP traps...") - - def isReplaying(self): - """ - @returns True if we are replaying a packet instead of capturing one - """ - return len(self._preferences.options.replayFilePrefix) > 0 - - def getEnterpriseString(self, pdu): - """ - Get the enterprise string from the PDU or replayed packet - - @param pdu: raw packet - @type pdu: binary - @return: enterprise string - @rtype: string - """ - if hasattr(pdu, "fake"): # Replaying a packet - return pdu.enterprise - return '.'.join( - str(pdu.enterprise[i]) for i in range(pdu.enterprise_length) - ) - - def getResult(self, pdu): - """ - Get the values from the PDU or replayed packet - - @param pdu: raw packet - @type pdu: binary - @return: variables from the PDU or Fake packet - @rtype: dictionary - """ - if hasattr(pdu, "fake"): # Replaying a packet - return pdu.variables - return netsnmp.getResult(pdu, self.log) - - def getCommunity(self, pdu): - """ - Get the community string from the PDU or replayed packet - - @param pdu: raw packet - @type pdu: binary - @return: SNMP community - @rtype: string - """ - if hasattr(pdu, "fake"): # Replaying a packet - return pdu.community - elif pdu.community_len: - return c.string_at(pdu.community, pdu.community_len) - return '' - - def convertPacketToPython(self, addr, pdu): - """ - Store the raw packet for later examination and troubleshooting. 
- - @param addr: packet-sending host's IP address and port - @type addr: (string, number) - @param pdu: raw packet - @type pdu: binary - @return: Python FakePacket object - @rtype: Python FakePacket object - """ - packet = FakePacket() - packet.version = pdu.version - packet.host = addr[0] - packet.port = addr[1] - packet.variables = netsnmp.getResult(pdu, self.log) - packet.community = '' - packet.enterprise_length = pdu.enterprise_length - - # Here's where we start to encounter differences between packet types - if pdu.version == SNMPv1: - # SNMPv1 can't be received via IPv6 - packet.agent_addr = [pdu.agent_addr[i] for i in range(4)] - packet.trap_type = pdu.trap_type - packet.specific_type = pdu.specific_type - packet.enterprise = self.getEnterpriseString(pdu) - packet.community = self.getCommunity(pdu) - - return packet - - def replay(self, pdu): - """ - Replay a captured packet - - @param pdu: raw packet - @type pdu: binary - """ - ts = time.time() - self.asyncHandleTrap([pdu.host, pdu.port], pdu, ts) - - def oid2name(self, oid, exactMatch=True, strip=False): - """ - Returns a MIB name based on an OID and special handling flags. - - @param oid: SNMP Object IDentifier - @type oid: string - @param exactMatch: find the full OID or don't match - @type exactMatch: boolean - @param strip: show what matched, or matched + numeric OID remainder - @type strip: boolean - @return: Twisted deferred object - @rtype: Twisted deferred object - """ - if isinstance(oid, tuple): - oid = '.'.join(map(str, oid)) - - oid = oid.strip('.') - if exactMatch: - return self.oidMap.get(oid, oid) - - oidlist = oid.split('.') - for i in range(len(oidlist), 0, -1): - name = self.oidMap.get('.'.join(oidlist[:i]), None) - if name is None: - continue - - oid_trail = oidlist[i:] - if len(oid_trail) > 0 and not strip: - return "%s.%s" % (name, '.'.join(oid_trail)) - return name - - return oid - - def _pre_parse( - self, session, transport, transport_data, transport_data_length): - """Called before the net-snmp library parses the PDU. In the case - where a v3 trap comes in with unkwnown credentials, net-snmp silently - discards the packet. This method gives zentrap a way to log that these - packets were received to help with troubleshooting. - """ - if self.log.isEnabledFor(logging.DEBUG): - ipv6_socket_address = c.cast( - transport_data, c.POINTER(sockaddr_in6) - ).contents - if ipv6_socket_address.family == socket.AF_INET6: - self.log.debug( - "pre_parse: IPv6 %s", - socket.inet_ntop( - socket.AF_INET6, ipv6_socket_address.addr - ) - ) - elif ipv6_socket_address.family == socket.AF_INET: - ipv4_socket_address = c.cast( - transport_data, c.POINTER(sockaddr_in) - ).contents - self.log.debug( - "pre_parse: IPv4 %s", - socket.inet_ntop(socket.AF_INET, ipv4_socket_address.addr) - ) - else: - self.log.debug( - "pre_parse: unexpected address family: %s", - ipv6_socket_address.family - ) - return 1 - - def receiveTrap(self, pdu): - """ - Accept a packet from the network and spin off a Twisted - deferred to handle the packet. 
- - @param pdu: Net-SNMP object - @type pdu: netsnmp_pdu object - """ - if pdu.version not in (SNMPv1, SNMPv2, SNMPv3): - self.log.error("Unable to handle trap version %d", pdu.version) - return - if pdu.transport_data is None: - self.log.error("PDU does not contain transport data") - return - - ipv6_socket_address = c.cast( - pdu.transport_data, c.POINTER(sockaddr_in6) - ).contents - if ipv6_socket_address.family == socket.AF_INET6: - if pdu.transport_data_length < c.sizeof(sockaddr_in6): - self.log.error( - "PDU transport data is too small for sockaddr_in6 struct." - ) - return - ip_address = self.getPacketIp(ipv6_socket_address.addr) - elif ipv6_socket_address.family == socket.AF_INET: - if pdu.transport_data_length < c.sizeof(sockaddr_in): - self.log.error( - "PDU transport data is too small for sockaddr_in struct." - ) - return - ipv4_socket_address = c.cast( - pdu.transport_data, c.POINTER(sockaddr_in) - ).contents - ip_address = '.'.join(str(i) for i in ipv4_socket_address.addr) - else: - self.log.error( - "Got a packet with unrecognized network family: %s", - ipv6_socket_address.family - ) - return - - port = socket.ntohs(ipv6_socket_address.port) - self.log.debug("Received packet from %s at port %s", ip_address, port) - self.processPacket(ip_address, port, pdu, time.time()) - # update our total events stats - totalTime, totalEvents, maxTime = self.stats.report() - stat = self._statService.getStatistic("events") - stat.value = totalEvents - - def getPacketIp(self, addr): - """ - For IPv4, convert a pointer to 4 bytes to a dotted-ip-address - For IPv6, convert a pointer to 16 bytes to a canonical IPv6 address. - """ - - def _gen_byte_pairs(): - for left, right in zip(addr[::2], addr[1::2]): - yield "%.2x%.2x" % (left, right) - - v4_mapped_prefix = [0x00] * 10 + [0xff] * 2 - if addr[:len(v4_mapped_prefix)] == v4_mapped_prefix: - ip_address = '.'.join(str(i) for i in addr[-4:]) - else: - try: - basic_v6_address = ':'.join(_gen_byte_pairs()) - ip_address = str(IPAddress(basic_v6_address, 6)) - except ValueError: - self.log.warn("The IPv6 address is incorrect: %s", addr[:]) - ip_address = "::" - return ip_address - - def processPacket(self, ip_address, port, pdu, ts): - """ - Wrapper around asyncHandleTrap to process the provided packet. - - @param pdu: Net-SNMP object - @type pdu: netsnmp_pdu object - @param ts: time stamp - @type ts: datetime - """ - # At the end of this callback, pdu will be deleted, so copy it - # for asynchronous processing - dup = netsnmp.lib.snmp_clone_pdu(c.byref(pdu)) - if not dup: - self.log.error("Could not clone PDU for asynchronous processing") - return - - def cleanup(result): - """ - Twisted callback to delete a previous memory allocation - - @param result: Net-SNMP object - @type result: netsnmp_pdu object - @return: the result parameter - @rtype: binary - """ - netsnmp.lib.snmp_free_pdu(dup) - return result - - d = defer.maybeDeferred( - self.asyncHandleTrap, (ip_address, port), dup.contents, ts - ) - d.addBoth(cleanup) - - def snmpInform(self, addr, pdu): - """ - A SNMP trap can request that the trap recipient return back a response. - This is where we do that. 
- """ - reply = netsnmp.lib.snmp_clone_pdu(c.byref(pdu)) - if not reply: - self.log.error("Could not clone PDU for INFORM response") - raise RuntimeError("Cannot respond to INFORM PDU") - reply.contents.command = netsnmp.SNMP_MSG_RESPONSE - reply.contents.errstat = 0 - reply.contents.errindex = 0 - - # FIXME: might need to add udp6 for IPv6 addresses - sess = netsnmp.Session( - peername='%s:%d' % tuple(addr), version=pdu.version - ) - sess.open() - if not netsnmp.lib.snmp_send(sess.sess, reply): - netsnmp.lib.snmp_sess_perror( - "Unable to send inform PDU", self.session.sess - ) - netsnmp.lib.snmp_free_pdu(reply) - sess.close() - - def decodeSnmpv1(self, addr, pdu): - - result = {"snmpVersion": "1"} - result["device"] = addr[0] - - variables = self.getResult(pdu) - - self.log.debug("SNMPv1 pdu has agent_addr: %s", - str(hasattr(pdu, 'agent_addr'))) - - if hasattr(pdu, 'agent_addr'): - origin = '.'.join(str(i) for i in pdu.agent_addr) - result["device"] = origin - - enterprise = self.getEnterpriseString(pdu) - generic = pdu.trap_type - specific = pdu.specific_type - - result["snmpV1Enterprise"] = enterprise - result["snmpV1GenericTrapType"] = generic - result["snmpV1SpecificTrap"] = specific - - # Try an exact match with a .0. inserted between enterprise and - # specific OID. It seems that MIBs frequently expect this .0. - # to exist, but the device's don't send it in the trap. - result["oid"] = "%s.0.%d" % (enterprise, specific) - name = self.oid2name(result["oid"], exactMatch=True, strip=False) - - # If we didn't get a match with the .0. inserted we will try - # resolving with the .0. inserted and allow partial matches. - if name == result["oid"]: - result["oid"] = "%s.%d" % (enterprise, specific) - name = self.oid2name(result["oid"], exactMatch=False, strip=False) - - # Look for the standard trap types and decode them without - # relying on any MIBs being loaded. - eventType = { - 0: 'coldStart', - 1: 'warmStart', - 2: 'snmp_linkDown', - 3: 'snmp_linkUp', - 4: 'authenticationFailure', - 5: 'egpNeighorLoss', - 6: name, - }.get(generic, name) - - # Decode all variable bindings. Allow partial matches and strip - # off any index values. 
- varbinds = [] - for vb_oid, vb_value in variables: - vb_value = decode_snmp_value(vb_value) - vb_oid = '.'.join(map(str, vb_oid)) - if vb_value is None: - log.debug( - "[decodeSnmpv1] enterprise %s, varbind-oid %s, " - "varbind-value %s", enterprise, vb_oid, vb_value - ) - varbinds.append((vb_oid, vb_value)) - - result.update(self._process_varbinds(varbinds)) - - return eventType, result - - def decodeSnmpV2OrV3(self, addr, pdu): - eventType = 'unknown' - version = "2" if pdu.version == SNMPv2 else "3" - result = {"snmpVersion": version, "oid": "", "device": addr[0]} - variables = self.getResult(pdu) - - varbinds = [] - for vb_oid, vb_value in variables: - vb_value = decode_snmp_value(vb_value) - vb_oid = '.'.join(map(str, vb_oid)) - if vb_value is None: - log.debug( - "[decodeSnmpV2OrV3] varbind-oid %s, varbind-value %s", - vb_oid, vb_value - ) - - # SNMPv2-MIB/snmpTrapOID - if vb_oid == '1.3.6.1.6.3.1.1.4.1.0': - result["oid"] = vb_value - eventType = self.oid2name( - vb_value, exactMatch=False, strip=False - ) - elif vb_oid.startswith('1.3.6.1.6.3.18.1.3'): - self.log.debug("found snmpTrapAddress OID: %s = %s", - vb_oid, vb_value) - result['snmpTrapAddress'] = vb_value - result['device'] = vb_value - else: - varbinds.append((vb_oid, vb_value)) - - result.update(self._process_varbinds(varbinds)) - - if eventType in ["linkUp", "linkDown"]: - eventType = "snmp_" + eventType - - return eventType, result - - def asyncHandleTrap(self, addr, pdu, startProcessTime): - """ - Twisted callback to process a trap - - @param addr: packet-sending host's IP address, port info - @type addr: ( host-ip, port) - @param pdu: Net-SNMP object - @type pdu: netsnmp_pdu object - @param startProcessTime: time stamp - @type startProcessTime: datetime - @return: Twisted deferred object - @rtype: Twisted deferred object - """ - self.capturePacket(addr[0], addr, pdu) - - # Some misbehaving agents will send SNMPv1 traps contained within - # an SNMPv2c PDU. So we can't trust tpdu.version to determine what - # version trap exists within the PDU. We need to assume that a - # PDU contains an SNMPv1 trap if the enterprise_length is greater - # than zero in addition to the PDU version being 0. 
- if pdu.version == SNMPv1 or pdu.enterprise_length > 0: - self.log.debug("SNMPv1 trap, Addr: %s PDU Agent Addr: %s", - str(addr), str(pdu.agent_addr)) - eventType, result = self.decodeSnmpv1(addr, pdu) - elif pdu.version in (SNMPv2, SNMPv3): - self.log.debug("SNMPv2 or v3 trap, Addr: %s", str(addr)) - eventType, result = self.decodeSnmpV2OrV3(addr, pdu) - else: - self.log.error("Unable to handle trap version %d", pdu.version) - return - self.log.debug("asyncHandleTrap: eventType=%s oid=%s snmpVersion=%s", - eventType, result['oid'], result['snmpVersion']) - - community = self.getCommunity(pdu) - result['zenoss.trap_source_ip'] = addr[0] - self.sendTrapEvent(result, community, eventType, - startProcessTime) - - if self.isReplaying(): - self.replayed += 1 - # Don't attempt to respond back if we're replaying packets - return - - if pdu.command == netsnmp.SNMP_MSG_INFORM: - self.snmpInform(addr, pdu) - - def sendTrapEvent(self, result, community, eventType, startProcessTime): - summary = 'snmp trap %s' % eventType - self.log.debug(summary) - result.setdefault('component', '') - result.setdefault('eventClassKey', eventType) - result.setdefault('eventGroup', 'trap') - result.setdefault('severity', SEVERITY_WARNING) - result.setdefault('summary', summary) - result.setdefault('community', community) - result.setdefault('firstTime', startProcessTime) - result.setdefault('lastTime', startProcessTime) - result.setdefault('monitor', self.options.monitor) - self._eventService.sendEvent(result) - self.stats.add(time.time() - startProcessTime) - - def displayStatistics(self): - totalTime, totalEvents, maxTime = self.stats.report() - display = "%d events processed in %.2f seconds" % (totalEvents, - totalTime) - if totalEvents > 0: - display += """ -%.5f average seconds per event -Maximum processing time for one event was %.5f""" % ( - (totalTime / totalEvents), maxTime) - return display - - def cleanup(self): - if self.session: - self.session.close() - status = self.displayStatistics() - self.log.info(status) - - -class Decoders: - """methods to decode OID values - """ - - @staticmethod - def dateandtime(value): - """Tries converting a DateAndTime value to a printable string. - - A date-time specification. 
- field octets contents range - ----- ------ -------- ----- - 1 1-2 year* 0..65536 - 2 3 month 1..12 - 3 4 day 1..31 - 4 5 hour 0..23 - 5 6 minutes 0..59 - 6 7 seconds 0..60 - (use 60 for leap-second) - 7 8 deci-seconds 0..9 - 8 9 direction from UTC '+' / '-' - 9 10 hours from UTC* 0..13 - 10 11 minutes from UTC 0..59 - """ - try: - dt, tz = value[:8], value[8:] - if len(dt) != 8 or len(tz) != 3: - return None - (year, mon, day, hour, mins, secs, dsecs) = unpack(">HBBBBBB", dt) - # Ensure valid date representation - invalid = ( - (mon < 1 or mon > 12), - (day < 1 or day > 31), - (hour > 23), - (mins > 59), - (secs > 60), - (dsecs > 9), - ) - if any(invalid): - return None - (utc_dir, utc_hour, utc_min) = unpack(">cBB", tz) - # Some traps send invalid UTC times (direction is 0) - if utc_dir == '\x00': - tz_min = time.timezone / 60 - if tz_min < 0: - utc_dir = '-' - tz_min = -tz_min - else: - utc_dir = '+' - utc_hour = tz_min / 60 - utc_min = tz_min % 60 - if utc_dir not in ('+', '-'): - return None - return "%04d-%02d-%02dT%02d:%02d:%02d.%d00%s%02d:%02d" % ( - year, mon, day, hour, mins, secs, - dsecs, utc_dir, utc_hour, utc_min - ) - except TypeError: - pass - - @staticmethod - def oid(value): - if isinstance(value, tuple) \ - and len(value) > 2 \ - and value[0] in (0, 1, 2) \ - and all(isinstance(i, int) for i in value): - return '.'.join(map(str, value)) - - @staticmethod - def number(value): - return value if isinstance(value, (long, int)) else None - - @staticmethod - def ipaddress(value): - for version in (socket.AF_INET, socket.AF_INET6): - try: - return socket.inet_ntop(version, value) - except (ValueError, TypeError): - pass - - @staticmethod - def utf8(value): - try: - return value.decode('utf8') - except (UnicodeDecodeError, AttributeError): - pass - - @staticmethod - def encode_base64(value): - return 'BASE64:' + base64.b64encode(value) - - -# NOTE: The order of decoders in the list determines their priority -_decoders = [ - Decoders.oid, - Decoders.number, - Decoders.utf8, - Decoders.ipaddress, - Decoders.dateandtime, - Decoders.encode_base64 -] - - -def decode_snmp_value(value): - """Given a raw OID value - Itterate over the list of decoder methods in order - Returns the first value returned by a decoder method - """ - if value is None: - return value - try: - for decoder in _decoders: - out = decoder(value) - if out is not None: - return out - except Exception as err: - log.exception("Unexpected exception: %s", err) - - -@implementer(IScheduledTask) -class MibConfigTask(ObservableMixin): - """ - Receive a configuration object containing MIBs and update the - mapping of OIDs to names. 
- """ - - def __init__(self, taskName, configId, - scheduleIntervalSeconds=3600, taskConfig=None): - super(MibConfigTask, self).__init__() - - # Needed for ZCA interface contract - self.name = taskName - self.configId = configId - self.state = TaskStates.STATE_IDLE - self.interval = scheduleIntervalSeconds - self._preferences = taskConfig - self._daemon = getUtility(ICollector) - - self._daemon.oidMap = self._preferences.oidMap - - def doTask(self): - return defer.succeed("Already updated OID -> name mappings...") - - def cleanup(self): - pass - - -class TrapDaemon(CollectorDaemon): - - _frameworkFactoryName = "nosip" - - def __init__(self, *args, **kwargs): - self._trapFilter = TrapFilter() - provideUtility(self._trapFilter, ICollectorEventTransformer) - kwargs["initializationCallback"] = self._initializeTrapFilter - super(TrapDaemon, self).__init__(*args, **kwargs) - - def _initializeTrapFilter(self): - try: - self._trapFilter.initialize() - initializationSucceededEvent = { - 'component': 'zentrap', - 'device': self.options.monitor, - 'eventClass': "/Status", - 'eventKey': "TrapFilterInit", - 'summary': 'initialized', - 'severity': Clear, - } - self.sendEvent(initializationSucceededEvent) - - except TrapFilterError as e: - initializationFailedEvent = { - 'component': 'zentrap', - 'device': self.options.monitor, - 'eventClass': "/Status", - 'eventKey': "TrapFilterInit", - 'summary': 'initialization failed', - 'message': e.message, - 'severity': Critical, - } - - log.error("Failed to initialize trap filter: %s", e.message) - self.sendEvent(initializationFailedEvent) - self.setExitCode(1) - self.stop() - - def runPostConfigTasks(self, result=None): - # 1) super sets self._prefs.task with the call to postStartupTasks - # 2) call remote createAllUsers - # 3) service in turn walks DeviceClass tree and returns users - CollectorDaemon.runPostConfigTasks(self, result) - if not isinstance(result, Failure) and self._prefs.task is not None: - service = self.getRemoteConfigServiceProxy() - log.debug('TrapDaemon.runPostConfigTasks callRemote createAllUsers') - d = service.callRemote("createAllUsers") - d.addCallback(self._createUsers) - - def remote_createUser(self, user): - reactor.callInThread(self._createUsers, [user]) - - def _createUsers(self, users): - log.debug('TrapDaemon._createUsers %s users', len(users)) - if self._prefs.task.session is None: - log.debug("No session created, so unable to create users") - else: - self._prefs.task.session.create_users(users) - - -if __name__ == '__main__': - myPreferences = SnmpTrapPreferences() - myTaskFactory = SimpleTaskFactory(MibConfigTask) - myTaskSplitter = SimpleTaskSplitter(myTaskFactory) - daemon = TrapDaemon(myPreferences, myTaskSplitter) - daemon.run() diff --git a/Products/ZenEvents/zentrap/__init__.py b/Products/ZenEvents/zentrap/__init__.py new file mode 100644 index 0000000000..d5c98586fe --- /dev/null +++ b/Products/ZenEvents/zentrap/__init__.py @@ -0,0 +1,8 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## diff --git a/Products/ZenEvents/zentrap/__main__.py b/Products/ZenEvents/zentrap/__main__.py new file mode 100755 index 0000000000..47c8298b7a --- /dev/null +++ b/Products/ZenEvents/zentrap/__main__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +from Products.ZenEvents.zentrap.app import TrapDaemon + +if __name__ == "__main__": + TrapDaemon().run() diff --git a/Products/ZenEvents/zentrap/app.py b/Products/ZenEvents/zentrap/app.py new file mode 100644 index 0000000000..2ac123f013 --- /dev/null +++ b/Products/ZenEvents/zentrap/app.py @@ -0,0 +1,347 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2007, 2011-2012, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +"""zentrap + +Creates events from SNMP Traps. +Currently a wrapper around the Net-SNMP C library. +""" + +from __future__ import absolute_import, print_function + +import socket +import sys +import time + +from twisted.internet import defer, reactor +from twisted.internet.task import LoopingCall + +from zope.component import provideUtility + +from Products.ZenCollector.utils.maintenance import ZenHubHeartbeatSender +from Products.ZenEvents.ZenEventClasses import Info +from Products.ZenHub.interfaces import ICollectorEventTransformer +from Products.ZenHub.PBDaemon import PBDaemon +from Products.ZenHub.services.SnmpTrapConfig import User # noqa: F401 + +from .capture import Capture +from .filterspec import FilterSpecification +from .handlers import TrapHandler, ReplayTrapHandler +from .net import ipv6_is_enabled +from .oidmap import OidMap +from .receiver import Receiver +from .replay import PacketReplay +from .trapfilter import TrapFilter +from .users import CreateAllUsers + +_dropped_events_task_interval = 3600 + + +class TrapDaemon(PBDaemon): + """ + Daemon for monitoring SNMP traps and sending events derived from + recieved traps. 
+ """ + + mname = name = "zentrap" + + _cacheservice = "Products.ZenCollector.services.ConfigCache" + _configservice = "Products.ZenHub.services.SnmpTrapConfig" + initialServices = PBDaemon.initialServices + [ + _cacheservice, + _configservice, + ] + + def __init__(self, *args, **kwargs): + super(TrapDaemon, self).__init__(*args, **kwargs) + + self.configCycleInterval = 2 * 60 # seconds + self.cycleInterval = 5 * 60 # seconds + + filterspec = FilterSpecification(self.options.monitor) + self._trapfilter = TrapFilter(self, filterspec) + provideUtility(self._trapfilter, ICollectorEventTransformer) + self._trapfilter_task = None + + self._heartbeat_sender = ZenHubHeartbeatSender( + self.options.monitor, + self.name, + self.options.heartbeatTimeout, + ) + self._heartbeat_task = None + + self._oidmap = OidMap(self) + self._oidmap_task = None + + self._createusers = None + self._createusers_task = None + + self._dropped_events_task = None + + self._receiver = None + + def buildOptions(self): + super(TrapDaemon, self).buildOptions() + try: + TRAP_PORT = socket.getservbyname("snmptrap", "udp") + except socket.error: + TRAP_PORT = 162 + self.parser.add_option( + "--trapport", + "-t", + dest="trapport", + type="int", + default=TRAP_PORT, + help="Listen for SNMP traps on this port", + ) + self.parser.add_option( + "--useFileDescriptor", + dest="useFileDescriptor", + type="int", + default=None, + help="Read from an existing connection " + "rather than opening a new port", + ) + self.parser.add_option( + "--varbindCopyMode", + dest="varbindCopyMode", + type="int", + default=2, + help="Varbind copy mode. Possible values: " + "0 - the varbinds are copied into event as one field and " + "ifIndex field is added. " + "1 - the varbinds are copied into event as several fields " + "and sequence field is added. " + "2 - the mixed mode. Uses varbindCopyMode=0 behaviour if " + "there is only one occurrence of the varbind, otherwise " + "uses varbindCopyMode=1 behaviour", + ) + self.parser.add_option( + "--oidmap-update-interval", + type="int", + default=5, + help="The interval, in minutes, between checks for " + "updates to the SNMP OID configuration", + ) + Capture.add_options(self.parser) + PacketReplay.add_options(self.parser) + + # @override + def run(self): + if ( + not self.options.replayFilePrefix + and not self.options.useFileDescriptor + and self.options.trapport < 1024 + ): + self.log.info("opening privileged port %s", self.options.trapport) + listen_ip = "ipv6" if ipv6_is_enabled() else "0.0.0.0" # noqa: S104 + # Makes call to zensocket here + # does an exec* so it never returns + self.openPrivilegedPort( + "--listen", + "--proto=udp", + "--port=%s:%d" % (listen_ip, self.options.trapport), + ) + self.log.error("Failed to open privileged port") + sys.exit(1) + + super(TrapDaemon, self).run() + + # @override + @defer.inlineCallbacks + def connected(self): + # Load the trap filters and oid map before starting tasks. + # These 'yield' statements are blocking calls within this method. + yield self._trapfilter.task() + yield self._oidmap.task() + + replay = PacketReplay.from_options(self.options) + if replay: + # A `replay` object was created, so replay previously + # captured packets. + self._replay_packets(replay) + else: + # Start tasks used for normal (non-replay) operation. 
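# A standalone sketch of what the --varbindCopyMode option defined in
# buildOptions() below does with a trap's varbinds.  The grouping mirrors
# the mixed mode (2); the mode 0 and mode 1 shapes noted in the asserts
# follow the option's help text.  The processors module that actually
# implements these modes is not shown here, and the "ifDescr" varbind
# names are illustrative only.
from collections import defaultdict


def _mixed_copy(varbinds):
    # varbinds: [(resolved name with instance suffix, value), ...]
    result, groups = defaultdict(list), defaultdict(list)
    for full_name, value in varbinds:
        base_name = full_name.rsplit(".", 1)[0]
        groups[base_name].append((full_name, value))
    for base_name, data in groups.items():
        offset = len(base_name) + 1
        if len(data) == 1:  # a single instance: mode 0 rules
            full_name, value = data[0]
            result[base_name].append(value)
            result[base_name + ".ifIndex"].append(full_name[offset:])
        else:  # several instances: mode 1 rules
            for full_name, value in data:
                result[full_name].append(value)
                result[base_name + ".sequence"].append(full_name[offset:])
    return {name: ",".join(vals) for name, vals in result.items()}


assert _mixed_copy([("ifDescr.1", "eth0")]) == {
    "ifDescr": "eth0", "ifDescr.ifIndex": "1"
}
assert _mixed_copy([("ifDescr.1", "eth0"), ("ifDescr.2", "eth1")]) == {
    "ifDescr.1": "eth0", "ifDescr.2": "eth1", "ifDescr.sequence": "1,2"
}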
+ self._start_dropped_events_task() + self._start_heartbeat_task() + self._start_receiver() + + # @override + def postStatisticsImpl(self): + if self._receiver is None: + return + totalTime, totalEvents, maxTime = self._receiver.handler.stats.report() + self.rrdStats.counter("events", totalEvents) + + @defer.inlineCallbacks + def getRemoteConfigCacheProxy(self): + """Return the remote configuration cache proxy.""" + proxy = yield self.getService(self._cacheservice) + defer.returnValue(proxy) + + @defer.inlineCallbacks + def getRemoteConfigServiceProxy(self): + """Return the remote configuration service proxy object.""" + proxy = yield self.getService(self._configservice) + defer.returnValue(proxy) + + def _start_dropped_events_task(self): + self._dropped_events_task = LoopingCall( + self._send_dropped_trap_count_event + ) + self._dropped_events_task.start(_dropped_events_task_interval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_dropped_events_task + ) + self.log.info( + "started task to send an event with the current " + "dropped events count" + ) + + def _stop_dropped_events_task(self): + if self._dropped_events_task is None: + return + self._dropped_events_task.stop() + self._dropped_events_task = None + self.log.info( + "stopped task to send an event with the current " + "dropped events count" + ) + + def _send_dropped_trap_count_event(self): + counterName = "eventFilterDroppedCount" + count = self.counters[counterName] + self.log.info("sma stat event, counter %s: %s", counterName, count) + counterEvent = { + "component": "zentrap", + "device": self.options.monitor, + "eventClass": "/App/Zenoss", + "eventKey": "zentrap.{}".format(counterName), + "summary": "{}: {}".format(counterName, count), + "severity": Info, + } + self.sendEvent(counterEvent) + + def _replay_packets(self, replay): + handler = ReplayTrapHandler( + self._oidmap, + self.options.varbindCopyMode, + self.options.monitor, + self, + ) + for packet in replay: + handler((packet.host, packet.port), packet, time.time()) + + def _start_heartbeat_task(self): + self._heartbeat_task = LoopingCall(self._heartbeat_sender.heartbeat) + self._heartbeat_task.start(self.cycleInterval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_heartbeat_task + ) + self.log.info("started task for sending heartbeats") + + def _stop_heartbeat_task(self): + if self._heartbeat_task is None: + return + self._heartbeat_task.stop() + self._heartbeat_task = None + self.log.info("stopped task for sending heartbeats") + + def _start_receiver(self): + self._start_trapfilter_task() + self._start_oidmap_task() + + try: + handler = TrapHandler( + self._oidmap, + self.options.varbindCopyMode, + self.options.monitor, + self, + ) + # Attempt to wrap the trap handler in a `Capture` object. + # If a `Capture` object is created, it becomes the handler. 
+ capture = Capture.wrap_handler(self.options, handler) + if capture: + handler = capture + + self._receiver = Receiver(self.options, handler) + + self._createusers = CreateAllUsers(self, self._receiver) + self._start_createusers_task() + + self._receiver.start() + reactor.addSystemEventTrigger( + "before", "shutdown", self._receiver.stop + ) + reactor.addSystemEventTrigger( + "after", "shutdown", self._displayStatistics + ) + except Exception: + self.log.exception("failed to initialize receiver") + + def _start_trapfilter_task(self): + self._trapfilter_task = LoopingCall(self._trapfilter.task) + self._trapfilter_task.start(self.configCycleInterval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_trapfilter_task + ) + self.log.info("started task to retrieve trap filters") + + def _stop_trapfilter_task(self): + if self._trapfilter_task: + self._trapfilter_task.stop() + self._trapfilter_task = None + self.log.info("stopped task to retrieve trap filters") + + def _start_oidmap_task(self): + self._oidmap_task = LoopingCall(self._oidmap.task) + self._oidmap_task.start(self.configCycleInterval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_oidmap_task + ) + self.log.info("started task to retrieve the OID map") + + def _stop_oidmap_task(self): + if self._oidmap_task: + self._oidmap_task.stop() + self._oidmap_task = None + self.log.info("stopped task to retrieve the OID map") + + def _start_createusers_task(self): + self._createusers_task = LoopingCall(self._createusers.task) + self._createusers_task.start(self.configCycleInterval) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_createusers_task + ) + self.log.info("started task to retrieve and create users") + + def _stop_createusers_task(self): + if self._createusers_task is None: + return + self._createusers_task.stop() + self._createusers_task = None + self.log.info("stopped task to retrieve and create users") + + def remote_createUser(self, user): + reactor.callInThread(self._createusers.create_users, [user]) + + def _displayStatistics(self): + totalTime, totalEvents, maxTime = self._receiver.handler.stats.report() + display = "%d events processed in %.2f seconds" % ( + totalEvents, + totalTime, + ) + if totalEvents > 0: + display += """ +%.5f average seconds per event +Maximum processing time for one event was %.5f""" % ( + (totalTime / totalEvents), + maxTime, + ) + self.log.info(display) diff --git a/Products/ZenEvents/zentrap/capture.py b/Products/ZenEvents/zentrap/capture.py new file mode 100644 index 0000000000..d9ad28e732 --- /dev/null +++ b/Products/ZenEvents/zentrap/capture.py @@ -0,0 +1,202 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import abc +import cPickle +import logging + +from optparse import OptionValueError + +import six + +from pynetsnmp import netsnmp + +from .net import FakePacket, SNMPv1 + +log = logging.getLogger("zen.zentrap.capture") + + +@six.add_metaclass(abc.ABCMeta) +class PacketCapture(object): + """ + Capture raw network packets. 
+ """ + + @staticmethod + def add_options(parser): + parser.add_option( + "--captureFilePrefix", + dest="captureFilePrefix", + action="callback", + callback=_validate_capturefileprefix, + default=None, + help="Directory and filename to use as a template " + "to store captured raw trap packets.", + ) + parser.add_option( + "--captureAll", + dest="captureAll", + action="callback", + callback=_handle_captureall, + default=False, + help="Capture all packets.", + ) + parser.add_option( + "--captureIps", + dest="captureIps", + action="callback", + callback=_handle_captureips, + default="", + help="Comma-separated list of IP addresses to capture.", + ) + + @classmethod + def from_options(cls, options): + """ + Returns a PacketCapture object if the `captureFilePrefix` attribute + of the `options` parameter is not empty. + """ + if options.captureFilePrefix: + if options.captureIps or options.captureAll: + return cls( + options.replayFilePrefix, + options.captureAll, + options.captureIps, + ) + else: + log.warning( + "ignoring --captureFilePrefix because neither " + "--captureAll nor --captureIps was specified" + ) + + def __init__(self, fileprefix, allips, ips): + self._fileprefix = fileprefix + self._filecount = 0 + if allips: + self._ips = None + self._include = lambda x: True + else: + self._ips = tuple(ips.split(",")) + self._include = lambda x: x in self._ips + + @property + def include(self): # () -> True | Tuple(str) + """ + Returns what packets are captured. + + Returns True to indicate all packets are captured. + + Returns a tuple of IP address strings indicating that only packets + from the given IP addesses are captured. + """ + return self._ips if self._ips else True + + @abc.abstractmethod + def to_pickleable(self, *data): + """ + Returns a pickleable object. + + The pickleable object should contain some form of the content + provided by the `data` argument. + """ + + def capture(self, hostname, *data): + """ + Store the raw packet for later examination and troubleshooting. + + @param hostname: packet-sending host's name or IP address + @type hostname: string + @param data: raw packet and other necessary arguments + @type data: List[Any] + """ + if not self._include(hostname): + log.debug("ignored packet source=%s", hostname) + return + + name = "%s-%s-%d" % (self._fileprefix, hostname, self._filecount) + try: + serializable = self.to_pickleable(*data) + serialized = cPickle.dumps(serializable, cPickle.HIGHEST_PROTOCOL) + with open(name, "wb") as fp: + fp.write(serialized) + self._filecount += 1 + log.debug("captured packet source=%s", hostname) + except Exception: + log.exception( + "failed to capture packet source=%s file=%s", hostname, name + ) + + +def _validate_capturefileprefix(option, optstr, value, parser): + if getattr(parser.values, "replayFilePrefix", None): + raise OptionValueError( + "can't use --captureFilePrefix with --replayFilePrefix" + ) + setattr(parser.values, option.dest, value) + + +def _handle_captureall(option, optstr, value, parser): + if getattr(parser.values, "captureIps", None): + raise OptionValueError("can't use --captureAll with --captureIps") + setattr(parser.values, option.dest, True) + + +def _handle_captureips(option, optstr, value, parser): + if getattr(parser.values, "captureAll", None): + raise OptionValueError("can't use --captureIps with --captureAll") + setattr(parser.values, option.dest, value) + + +class Capture(PacketCapture): + """ + Wraps a TrapHandler to capture packets. 
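# A standalone sketch of the on-disk layout produced by capture() below:
# each accepted packet is pickled into its own file named
# "<prefix>-<host>-<count>".  Reading one back is the mirror image of that
# write (presumably what the separate replay machinery does; it is not
# shown here).  The prefix, host, and count in the trailing comment are
# examples only.
import cPickle


def read_captured_packet(fileprefix, hostname, count):
    name = "%s-%s-%d" % (fileprefix, hostname, count)
    with open(name, "rb") as fp:
        return cPickle.load(fp)


# e.g. packet = read_captured_packet("/tmp/traps/zentrap", "192.0.2.10", 0)
# For the Capture subclass below, the unpickled object is a FakePacket
# carrying .variables, .community, and the other attributes set in
# to_pickleable().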
+ """ + + @classmethod + def wrap_handler(cls, options, handler): + capture = cls.from_options(options) + if capture: + capture._handler = handler + return capture + + def __call__(self, addr, pdu, starttime): + self.capture(addr[0], addr, pdu) + self._handler(addr, pdu, starttime) + + def to_pickleable(self, addr, pdu): + """ + Store the raw packet for later examination and troubleshooting. + + @param addr: packet-sending host's IP address and port + @type addr: (string, number) + @param pdu: raw packet + @type pdu: binary + @return: Python FakePacket object + @rtype: Python FakePacket object + """ + packet = FakePacket() + packet.version = pdu.version + packet.host = addr[0] + packet.port = addr[1] + packet.variables = netsnmp.getResult(pdu, log) + packet.community = "" + packet.enterprise_length = pdu.enterprise_length + + # Here's where we start to encounter differences between packet types + if pdu.version == SNMPv1: + # SNMPv1 can't be received via IPv6 + packet.agent_addr = [pdu.agent_addr[i] for i in range(4)] + packet.trap_type = pdu.trap_type + packet.specific_type = pdu.specific_type + packet.enterprise = self._handler.getEnterpriseString(pdu) + packet.community = self._handler.getCommunity(pdu) + + return packet diff --git a/Products/ZenEvents/zentrap/decode.py b/Products/ZenEvents/zentrap/decode.py new file mode 100644 index 0000000000..237d0800be --- /dev/null +++ b/Products/ZenEvents/zentrap/decode.py @@ -0,0 +1,144 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import base64 +import logging +import socket +import time + +from struct import unpack + +import six + +log = logging.getLogger("zen.zentrap") + + +def decode_snmp_value(value): + """Given a raw OID value + Itterate over the list of decoder methods in order + Returns the first value returned by a decoder method + """ + if value is None: + return value + try: + for decoder in _decoders: + out = decoder(value) + if out is not None: + return out + except Exception as err: + log.exception("Unexpected exception: %s", err) + + +def dateandtime(value): + """Tries converting a DateAndTime value to a printable string. + + A date-time specification. 
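# How the daemon applies this wrapper (see _start_receiver() in app.py
# earlier in this change): the trap handler is built first and is replaced
# only when capturing was requested on the command line.  The options
# object and handler below are throwaway stand-ins; real runs get them
# from optparse and TrapHandler respectively.
from Products.ZenEvents.zentrap.capture import Capture


class _FakeOptions(object):
    captureFilePrefix = "/tmp/traps/zentrap"  # illustrative path
    captureAll = True
    captureIps = ""
    replayFilePrefix = None


def _stand_in_handler(addr, pdu, starttime):
    print("trap from %s:%d" % addr)


capture = Capture.wrap_handler(_FakeOptions(), _stand_in_handler)
handler = capture if capture else _stand_in_handler
# Every handler(addr, pdu, starttime) call now records the packet to disk
# before delegating to the wrapped handler.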
+ field octets contents range + ----- ------ -------- ----- + 1 1-2 year* 0..65536 + 2 3 month 1..12 + 3 4 day 1..31 + 4 5 hour 0..23 + 5 6 minutes 0..59 + 6 7 seconds 0..60 + (use 60 for leap-second) + 7 8 deci-seconds 0..9 + 8 9 direction from UTC '+' / '-' + 9 10 hours from UTC* 0..13 + 10 11 minutes from UTC 0..59 + """ + try: + dt, tz = value[:8], value[8:] + if len(dt) != 8 or len(tz) != 3: + return None + (year, mon, day, hour, mins, secs, dsecs) = unpack(">HBBBBBB", dt) + # Ensure valid date representation + invalid = ( + (mon < 1 or mon > 12), + (day < 1 or day > 31), + (hour > 23), + (mins > 59), + (secs > 60), + (dsecs > 9), + ) + if any(invalid): + return None + (utc_dir, utc_hour, utc_min) = unpack(">cBB", tz) + # Some traps send invalid UTC times (direction is 0) + if utc_dir == "\x00": + tz_min = time.timezone / 60 + if tz_min < 0: + utc_dir = "-" + tz_min = -tz_min + else: + utc_dir = "+" + utc_hour = tz_min / 60 + utc_min = tz_min % 60 + if utc_dir not in ("+", "-"): + return None + return "%04d-%02d-%02dT%02d:%02d:%02d.%d00%s%02d:%02d" % ( + year, + mon, + day, + hour, + mins, + secs, + dsecs, + utc_dir, + utc_hour, + utc_min, + ) + except TypeError: + pass + + +def oid(value): + if ( + isinstance(value, tuple) + and len(value) > 2 + and value[0] in (0, 1, 2) + and all(isinstance(i, int) for i in value) + ): + return ".".join(map(str, value)) + + +def number(value): + return value if isinstance(value, six.integer_types) else None + + +def ipaddress(value): + for version in (socket.AF_INET, socket.AF_INET6): + try: + return socket.inet_ntop(version, value) + except (ValueError, TypeError): + pass + + +def utf8(value): + try: + return value.decode("utf8") + except (UnicodeDecodeError, AttributeError): + pass + + +def encode_base64(value): + return "BASE64:" + base64.b64encode(value) + + +# NOTE: The order of decoders in the list determines their priority +_decoders = [ + oid, + number, + utf8, + ipaddress, + dateandtime, + encode_base64, +] diff --git a/Products/ZenEvents/zentrap/filterspec.py b/Products/ZenEvents/zentrap/filterspec.py new file mode 100644 index 0000000000..9826dd0980 --- /dev/null +++ b/Products/ZenEvents/zentrap/filterspec.py @@ -0,0 +1,526 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging +import re + +from Products.ZenEvents import ZenEventClasses as severity + +log = logging.getLogger("zen.zentrap") + + +class FilterSpecification(object): + def __init__(self, monitor): + self._monitor = monitor + + # Map of SNMP V1 Generic Trap filters where key is the generic trap + # number and value is a GenericTrapFilterDefinition + self._v1Traps = {} + + # Map of SNMP V1 enterprise-specific traps where key is the count of + # levels in an OID, and value is a map of unique V1FilterDefinition + # objects for that number of OID levels. The map of + # V1FilterDefinition objects is keyed by "OID[-specificTrap]" + self._v1Filters = {} + + # Map of SNMP V2 enterprise-specific traps where key is the count of + # levels in an OID, and value is a map of unique V2FilterDefinition + # objects for that number of OID levels. 
+        # The map of
+        Basic overriding rules:
+                    previousDef.lineNumber)
required" + + +class _SNMPVersions(object): + __slots__ = ("v1", "v2", "v3") + + def __init__(self): + self.v1 = "v1" + self.v2 = "v2" + self.v3 = "v3" + + def __contains__(self, value): + return value == self.v1 or value == self.v2 or value == self.v3 + + +_snmpVersions = _SNMPVersions() + + +class _Actions(object): + __slots__ = ("exclude", "include", "search_regex") + + def __init__(self): + self.exclude = "exclude" + self.include = "include" + self.search_regex = re.compile( + "{}|{}".format(self.exclude, self.include), re.IGNORECASE + ) + + def search(self, text): + return self.search_regex.search(text) + + def __contains__(self, value): + return value == self.exclude or value == self.include + + +_actions = _Actions() + + +def countOidLevels(oid): + """ + @return: The number of levels in an OID + @rtype: int + """ + return oid.count(".") + 1 if oid else 0 + + +class BaseFilterDefinition(object): + def __init__(self, lineNumber=None, action=None, collectorRegex=None): + self.lineNumber = lineNumber + self.action = action + self.collectorRegex = collectorRegex + + @property + def exclude(self): + return self.action == _actions.exclude + + +class GenericTrapFilterDefinition(BaseFilterDefinition): + def __init__( + self, + lineNumber=None, + action=None, + genericTrap=None, + collectorRegex=None, + ): + BaseFilterDefinition.__init__(self, lineNumber, action, collectorRegex) + self.genericTrap = genericTrap + + def __eq__(self, other): + if not isinstance(other, GenericTrapFilterDefinition): + return NotImplemented + return self.genericTrap == other.genericTrap + + def __ne__(self, other): + if not isinstance(other, GenericTrapFilterDefinition): + return NotImplemented + return self.genericTrap != other.genericTrap + + def __hash__(self): + return hash(self.genericTrap) + + +class OIDBasedFilterDefinition(BaseFilterDefinition): + def __init__( + self, lineNumber=None, action=None, oid=None, collectorRegex=None + ): + BaseFilterDefinition.__init__(self, lineNumber, action, collectorRegex) + self.oid = oid + + def levels(self): + return countOidLevels(self.oid) + + def __eq__(self, other): + if not isinstance(other, OIDBasedFilterDefinition): + return NotImplemented + return self.oid == other.oid + + def __ne__(self, other): + if not isinstance(other, OIDBasedFilterDefinition): + return NotImplemented + return self.oid != other.oid + + def __hash__(self): + return hash(self.oid) + + +class V1FilterDefinition(OIDBasedFilterDefinition): + def __init__( + self, lineNumber=None, action=None, oid=None, collectorRegex=None + ): + OIDBasedFilterDefinition.__init__( + self, lineNumber, action, oid, collectorRegex + ) + self.specificTrap = None + + +class V2FilterDefinition(OIDBasedFilterDefinition): + def __init__( + self, lineNumber=None, action=None, oid=None, collectorRegex=None + ): + OIDBasedFilterDefinition.__init__( + self, lineNumber, action, oid, collectorRegex + ) diff --git a/Products/ZenEvents/zentrap/handlers.py b/Products/ZenEvents/zentrap/handlers.py new file mode 100644 index 0000000000..b26b62332a --- /dev/null +++ b/Products/ZenEvents/zentrap/handlers.py @@ -0,0 +1,300 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
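# A standalone sketch of the filter syntax parsed by the methods above,
# written the way lines would appear in the zentrap trap-filter
# configuration (where that configuration comes from is outside this
# module).  The OIDs and the "localhost" collector name are illustrative
# only.
from Products.ZenEvents.zentrap.filterspec import FilterSpecification

_SAMPLE = "\n".join([
    "# generic SNMPv1 traps: drop coldStart, keep linkDown/linkUp",
    "exclude v1 0",
    "include v1 2",
    "include v1 3",
    "# enterprise-specific SNMPv1 trap, any specific trap type",
    "include v1 1.3.6.1.4.1.9999 *",
    "# SNMPv2 traps under a globbed OID, only on the collector 'localhost'",
    "localhost include v2 1.3.6.1.4.1.9999.*",
])

spec = FilterSpecification("localhost")
events = spec.update_from_string(_SAMPLE)  # parse errors come back as events
assert events == []
assert spec.defined
assert set(spec.v1traps) == {"0", "2", "3"}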
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import ctypes +import logging +import time + +from pynetsnmp import netsnmp +from zenoss.protocols.protobufs.zep_pb2 import SEVERITY_WARNING + +from Products.ZenEvents.EventServer import Stats + +from .decode import decode_snmp_value +from .net import SNMPv1, SNMPv2, SNMPv3 +from .processors import ( + LegacyVarbindProcessor, + DirectVarbindProcessor, + MixedVarbindProcessor, +) + +log = logging.getLogger("zen.zentrap.handlers") + + +class TrapHandler(object): + """ + Handle raw SNMP traps. + """ + + _varbind_processors = { + LegacyVarbindProcessor.MODE: LegacyVarbindProcessor, + DirectVarbindProcessor.MODE: DirectVarbindProcessor, + MixedVarbindProcessor.MODE: MixedVarbindProcessor, + } + + def __init__(self, oidmap, copymode, monitor, eventsvc): + self._oidmap = oidmap + self._monitor = monitor + self._eventservice = eventsvc + self.stats = Stats() + + if copymode not in self._varbind_processors: + copymode = MixedVarbindProcessor.MODE + log.warn( + "Invalid 'varbindCopyMode' value. " + "'varbindCopyMode=%s' will be used", + copymode, + ) + processor_class = self._varbind_processors.get(copymode) + self._process_varbinds = processor_class(self._oidmap.to_name) + + def __call__(self, addr, pdu, starttime): + """ + Process a trap. + + @param addr: packet-sending host's IP address, port info + @type addr: (host-ip, port) + @param pdu: Net-SNMP object + @type pdu: netsnmp_pdu object + @param starttime: time stamp + @type starttime: float + @return: Twisted deferred object + @rtype: Twisted deferred object + """ + # Some misbehaving agents will send SNMPv1 traps contained within + # an SNMPv2c PDU. So we can't trust tpdu.version to determine what + # version trap exists within the PDU. We need to assume that a + # PDU contains an SNMPv1 trap if the enterprise_length is greater + # than zero in addition to the PDU version being 0. 
+ if pdu.version == SNMPv1 or pdu.enterprise_length > 0: + log.debug( + "SNMPv1 trap, Addr: %s PDU Agent Addr: %s", + addr, + pdu.agent_addr, + ) + eventType, result = self.decodeSnmpv1(addr, pdu) + elif pdu.version in (SNMPv2, SNMPv3): + log.debug("SNMPv2 or v3 trap, Addr: %s", str(addr)) + eventType, result = self.decodeSnmpV2OrV3(addr, pdu) + else: + raise RuntimeError( + "Bad SNMP version string: '%s'" % (pdu.version,) + ) + + result["zenoss.trap_source_ip"] = addr[0] + community = self.getCommunity(pdu) + self.sendTrapEvent(result, community, eventType, starttime) + log.debug( + "handled trap event-type=%s oid=%s snmp-version=%s", + eventType, + result["oid"], + result["snmpVersion"], + ) + + def sendTrapEvent(self, result, community, eventType, starttime): + summary = "snmp trap %s" % eventType + log.debug(summary) + result.setdefault("component", "") + result.setdefault("eventClassKey", eventType) + result.setdefault("eventGroup", "trap") + result.setdefault("severity", SEVERITY_WARNING) + result.setdefault("summary", summary) + result.setdefault("community", community) + result.setdefault("firstTime", starttime) + result.setdefault("lastTime", starttime) + result.setdefault("monitor", self._monitor) + self._eventservice.sendEvent(result) + self.stats.add(time.time() - starttime) + + def decodeSnmpv1(self, addr, pdu): + result = {"snmpVersion": "1"} + result["device"] = addr[0] + + variables = self.getResult(pdu) + + log.debug("SNMPv1 pdu has agent_addr: %s", hasattr(pdu, "agent_addr")) + + if hasattr(pdu, "agent_addr"): + origin = ".".join(str(i) for i in pdu.agent_addr) + result["device"] = origin + + enterprise = self.getEnterpriseString(pdu) + generic = pdu.trap_type + specific = pdu.specific_type + + result["snmpV1Enterprise"] = enterprise + result["snmpV1GenericTrapType"] = generic + result["snmpV1SpecificTrap"] = specific + + # Try an exact match with a .0. inserted between enterprise and + # specific OID. It seems that MIBs frequently expect this .0. + # to exist, but the device's don't send it in the trap. + result["oid"] = "%s.0.%d" % (enterprise, specific) + name = self._oidmap.to_name( + result["oid"], exactMatch=True, strip=False + ) + + # If we didn't get a match with the .0. inserted we will try + # resolving with the .0. inserted and allow partial matches. + if name == result["oid"]: + result["oid"] = "%s.%d" % (enterprise, specific) + name = self._oidmap.to_name( + result["oid"], exactMatch=False, strip=False + ) + + # Look for the standard trap types and decode them without + # relying on any MIBs being loaded. + eventType = { + 0: "coldStart", + 1: "warmStart", + 2: "snmp_linkDown", + 3: "snmp_linkUp", + 4: "authenticationFailure", + 5: "egpNeighorLoss", + 6: name, + }.get(generic, name) + + # Decode all variable bindings. Allow partial matches and strip + # off any index values. 
+ varbinds = [] + for vb_oid, vb_value in variables: + vb_value = decode_snmp_value(vb_value) + vb_oid = ".".join(map(str, vb_oid)) + if vb_value is None: + log.debug( + "[decodeSnmpv1] enterprise %s, varbind-oid %s, " + "varbind-value %s", + enterprise, + vb_oid, + vb_value, + ) + varbinds.append((vb_oid, vb_value)) + + result.update(self._process_varbinds(varbinds)) + + return eventType, result + + def decodeSnmpV2OrV3(self, addr, pdu): + eventType = "unknown" + version = "2" if pdu.version == SNMPv2 else "3" + result = {"snmpVersion": version, "oid": "", "device": addr[0]} + variables = self.getResult(pdu) + + varbinds = [] + for vb_oid, vb_value in variables: + vb_value = decode_snmp_value(vb_value) + vb_oid = ".".join(map(str, vb_oid)) + if vb_value is None: + log.debug( + "[decodeSnmpV2OrV3] varbind-oid %s, varbind-value %s", + vb_oid, + vb_value, + ) + + # SNMPv2-MIB/snmpTrapOID + if vb_oid == "1.3.6.1.6.3.1.1.4.1.0": + result["oid"] = vb_value + eventType = self._oidmap.to_name( + vb_value, exactMatch=False, strip=False + ) + elif vb_oid.startswith("1.3.6.1.6.3.18.1.3"): + log.debug( + "found snmpTrapAddress OID: %s = %s", vb_oid, vb_value + ) + result["snmpTrapAddress"] = vb_value + result["device"] = vb_value + else: + varbinds.append((vb_oid, vb_value)) + + result.update(self._process_varbinds(varbinds)) + + if eventType in ["linkUp", "linkDown"]: + eventType = "snmp_" + eventType + + return eventType, result + + def getEnterpriseString(self, pdu): + """ + Get the enterprise string from the PDU or replayed packet + + @param pdu: raw packet + @type pdu: binary + @return: enterprise string + @rtype: string + """ + return ".".join( + str(pdu.enterprise[i]) for i in range(pdu.enterprise_length) + ) + + def getResult(self, pdu): + """ + Get the values from the PDU or replayed packet + + @param pdu: raw packet + @type pdu: binary + @return: variables from the PDU or Fake packet + @rtype: dictionary + """ + return netsnmp.getResult(pdu, log) + + def getCommunity(self, pdu): + """ + Get the community string from the PDU or replayed packet + + @param pdu: raw packet + @type pdu: binary + @return: SNMP community + @rtype: string + """ + if pdu.community_len: + return ctypes.string_at(pdu.community, pdu.community_len) + return "" + + +class ReplayTrapHandler(TrapHandler): + """ + Handle replayed SNMP traps. + + Used for replaying capture SNMP trap packets. + """ + + def getEnterpriseString(self, pdu): + """ + Get the enterprise string from the PDU or replayed packet + + @param pdu: raw packet + @type pdu: FakePacket + @return: enterprise string + @rtype: string + """ + return pdu.enterprise + + def getResult(self, pdu): + """ + Get the values from the PDU or replayed packet + + @param pdu: raw packet + @type pdu: FakePacket + @return: variables from the PDU or Fake packet + @rtype: dictionary + """ + return pdu.variables + + def getCommunity(self, pdu): + """ + Get the community string from the PDU or replayed packet + + @param pdu: raw packet + @type pdu: FakePacket + @return: SNMP community + @rtype: string + """ + return pdu.community diff --git a/Products/ZenEvents/zentrap/net.py b/Products/ZenEvents/zentrap/net.py new file mode 100644 index 0000000000..e49ecdaac2 --- /dev/null +++ b/Products/ZenEvents/zentrap/net.py @@ -0,0 +1,81 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import errno +import logging +import socket +import sys + +import ctypes as c # Magical interfacing with C code + +from pynetsnmp import netsnmp + +# Version codes from the PDU +SNMPv1 = netsnmp.SNMP_VERSION_1 +SNMPv2 = netsnmp.SNMP_VERSION_2c +SNMPv3 = netsnmp.SNMP_VERSION_3 + +log = logging.getLogger("zen.zentrap") + +# This is what struct sockaddr_in {} looks like +family = [("family", c.c_ushort)] +if sys.platform == "darwin": + family = [("len", c.c_ubyte), ("family", c.c_ubyte)] + + +class sockaddr_in(c.Structure): + _fields_ = family + [ + ("port", c.c_ubyte * 2), # need to decode from net-byte-order + ("addr", c.c_ubyte * 4), + ] + + +class sockaddr_in6(c.Structure): + _fields_ = family + [ + ("port", c.c_ushort), # need to decode from net-byte-order + ("flow", c.c_ubyte * 4), + ("addr", c.c_ubyte * 16), + ("scope_id", c.c_ubyte * 4), + ] + + +pre_parse_factory = c.CFUNCTYPE( + c.c_int, + c.POINTER(netsnmp.netsnmp_session), + c.POINTER(netsnmp.netsnmp_transport), + c.c_void_p, + c.c_int, +) + +# teach python that the return type of snmp_clone_pdu is a pdu pointer +netsnmp.lib.snmp_clone_pdu.restype = netsnmp.netsnmp_pdu_p + + +def ipv6_is_enabled(): + """test if ipv6 is enabled""" + # hack for ZEN-12088 - TODO: remove next line + return False + try: + socket.socket(socket.AF_INET6, socket.SOCK_DGRAM, 0) + except socket.error as e: + if e.errno == errno.EAFNOSUPPORT: + return False + raise + return True + + +class FakePacket(object): + """ + A fake object to make packet replaying feasible. + """ + + def __init__(self): + self.fake = True diff --git a/Products/ZenEvents/zentrap/oidmap.py b/Products/ZenEvents/zentrap/oidmap.py new file mode 100644 index 0000000000..f437ef779d --- /dev/null +++ b/Products/ZenEvents/zentrap/oidmap.py @@ -0,0 +1,81 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from twisted.internet import defer + +log = logging.getLogger("zen.zentrap.oidmap") + + +class OidMap(object): + """ + Retrieves the OID map from ZenHub. + """ + + def __init__(self, app): + self._app = app + self._checksum = None + self._oidmap = {} + + def to_name(self, oid, exactMatch=True, strip=False): + """ + Returns a MIB name based on an OID and special handling flags. 
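+
+        For example (illustrative, assuming the map contains
+        "1.3.6.1.6.3.1.1.5.3" -> "linkDown"): to_name("1.3.6.1.6.3.1.1.5.3")
+        returns "linkDown", while to_name("1.3.6.1.6.3.1.1.5.3.1",
+        exactMatch=False) returns "linkDown.1", or just "linkDown" when
+        strip is True.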
+
+        @param oid: SNMP Object IDentifier
+        @type oid: string
+        @param exactMatch: if True, return a name only on an exact OID match
+        @type exactMatch: boolean
+        @param strip: if True, drop any unmatched numeric OID remainder
+        @type strip: boolean
+        @return: the matching MIB name, or the OID itself if no match exists
+        @rtype: string
+        """
+        if isinstance(oid, tuple):
+            oid = ".".join(map(str, oid))
+
+        oid = oid.strip(".")
+        if exactMatch:
+            return self._oidmap.get(oid, oid)
+
+        oidlist = oid.split(".")
+        for i in range(len(oidlist), 0, -1):
+            name = self._oidmap.get(".".join(oidlist[:i]), None)
+            if name is None:
+                continue
+
+            oid_trail = oidlist[i:]
+            if len(oid_trail) > 0 and not strip:
+                return "%s.%s" % (name, ".".join(oid_trail))
+            return name
+
+        return oid
+
+    @defer.inlineCallbacks
+    def task(self):
+        log.debug("retrieving oid map")
+        try:
+            service = yield self._app.getRemoteConfigCacheProxy()
+            checksum, oidmap = yield service.callRemote(
+                "getOidMap", self._checksum
+            )
+            if checksum is None:
+                if self._checksum is None:
+                    log.info("waiting for the OID map to be built")
+                else:
+                    log.debug("no update available for the current OID map")
+            else:
+                state = "initial" if self._checksum is None else "updated"
+                self._checksum = checksum
+                self._oidmap = oidmap
+                log.info("received %s OID map", state)
+        except Exception:
+            log.exception("failed to retrieve oid map")
diff --git a/Products/ZenEvents/zentrap/processors.py b/Products/ZenEvents/zentrap/processors.py
new file mode 100644
index 0000000000..2007173424
--- /dev/null
+++ b/Products/ZenEvents/zentrap/processors.py
@@ -0,0 +1,92 @@
+##############################################################################
+#
+# Copyright (C) Zenoss, Inc. 2024, all rights reserved.
+#
+# This content is made available according to terms specified in
+# License.zenoss under the directory where your Zenoss product is installed.
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from collections import defaultdict + +log = logging.getLogger("zen.zentrap") + + +class LegacyVarbindProcessor(object): + MODE = 0 + + def __init__(self, oid2name): + self.oid2name = oid2name + + def __call__(self, varbinds): + result = defaultdict(list) + for oid, value in varbinds: + base_name = self.oid2name(oid, exactMatch=False, strip=True) + full_name = self.oid2name(oid, exactMatch=False, strip=False) + result[base_name].append(str(value)) + if base_name != full_name: + suffix = full_name[len(base_name) + 1 :] + result[base_name + ".ifIndex"].append(suffix) + return {name: ",".join(vals) for name, vals in result.iteritems()} + + +class DirectVarbindProcessor(object): + MODE = 1 + + def __init__(self, oid2name): + self.oid2name = oid2name + + def __call__(self, varbinds): + result = defaultdict(list) + for oid, value in varbinds: + base_name = self.oid2name(oid, exactMatch=False, strip=True) + full_name = self.oid2name(oid, exactMatch=False, strip=False) + result[full_name].append(str(value)) + if base_name != full_name: + suffix = full_name[len(base_name) + 1 :] + result[base_name + ".sequence"].append(suffix) + return {name: ",".join(vals) for name, vals in result.iteritems()} + + +class MixedVarbindProcessor(object): + MODE = 2 + + def __init__(self, oid2name): + self.oid2name = oid2name + + def __call__(self, varbinds): + result = defaultdict(list) + groups = defaultdict(list) + + # Group varbinds having the same MIB Object name together + for key, value in varbinds: + base_name = self.oid2name(key, exactMatch=False, strip=True) + full_name = self.oid2name(key, exactMatch=False, strip=False) + groups[base_name].append((full_name, str(value))) + + # Process each MIB object by name + for base_name, data in groups.items(): + offset = len(base_name) + 1 + + # If there's only one instance for a given object, then add + # the varbind to the event details using pre Zenoss 6.2.0 rules. + if len(data) == 1: + full_name, value = data[0] + result[base_name].append(value) + + suffix = full_name[offset:] + if suffix: + result[base_name + ".ifIndex"].append(suffix) + continue + + # Record the varbind instance(s) in their 'raw' form. + for full_name, value in data: + suffix = full_name[offset:] + result[full_name].append(value) + if suffix: + result[base_name + ".sequence"].append(suffix) + return {name: ",".join(vals) for name, vals in result.iteritems()} diff --git a/Products/ZenEvents/zentrap/receiver.py b/Products/ZenEvents/zentrap/receiver.py new file mode 100644 index 0000000000..b4ebaf68f5 --- /dev/null +++ b/Products/ZenEvents/zentrap/receiver.py @@ -0,0 +1,220 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import ctypes +import logging +import socket +import time + +from ipaddr import IPAddress +from pynetsnmp import netsnmp, twistedsnmp + +from .net import ( + ipv6_is_enabled, + pre_parse_factory, + SNMPv1, + SNMPv2, + SNMPv3, + sockaddr_in, + sockaddr_in6, +) + +log = logging.getLogger("zen.zentrap.receiver") + + +class Receiver(object): + """ + Listen for SNMP traps. + """ + + def __init__(self, options, handler): + self._port = options.trapport + self._handler = handler + + listening_protocol = "udp6" if ipv6_is_enabled() else "udp" + if options.useFileDescriptor is not None: + self._address = listening_protocol + ":1162" + self._fileno = int(options.useFileDescriptor) + else: + self._address = "%s:%d" % (listening_protocol, self._port) + self._fileno = -1 + + self._pre_parse_callback = pre_parse_factory(_pre_parse) + + @property + def handler(self): + return self._handler + + def start(self): + # Start listening for SNMP traps + self._session = netsnmp.Session() + self._session.awaitTraps( + self._address, self._fileno, self._pre_parse_callback, debug=True + ) + self._session.callback = self._receive_packet + twistedsnmp.updateReactor() + log.info("listening for SNMP traps port=%s", self._port) + + def stop(self): + if self._session: + self._session.close() + self._session = None + + def create_users(self, users): + if self._session is None: + log.debug("No session created, so unable to create users") + else: + self._session.create_users(users) + + def _receive_packet(self, pdu): + """ + Accept a packet from the network. + + @param pdu: Net-SNMP object + @type pdu: netsnmp_pdu object + """ + start_time = time.time() + if pdu.version not in (SNMPv1, SNMPv2, SNMPv3): + log.error("unable to handle trap version %d", pdu.version) + return + if pdu.transport_data is None: + log.error("PDU does not contain transport data") + return + + ip_address, port = _get_addr_and_port_from_packet(pdu) + if ip_address is None: + return + log.debug("received packet from %s on port %s", ip_address, port) + try: + self._handler((ip_address, port), pdu, start_time) + except Exception: + log.error("unable to handle trap version %s", pdu.version) + else: + if pdu.command == netsnmp.CONSTANTS.SNMP_MSG_INFORM: + self.snmpInform((ip_address, port), pdu) + finally: + log.debug("completed handling trap") + + def snmpInform(self, addr, pdu): + """ + A SNMP trap can request that the trap recipient return back a response. + """ + try: + reply = netsnmp.lib.snmp_clone_pdu(ctypes.byref(pdu)) + if not reply: + log.error("could not clone PDU for INFORM response") + return + reply.contents.command = netsnmp.CONSTANTS.SNMP_MSG_RESPONSE + reply.contents.errstat = 0 + reply.contents.errindex = 0 + + # FIXME: might need to add udp6 for IPv6 addresses + sess = netsnmp.Session( + peername="%s:%d" % tuple(addr), version=pdu.version + ) + sess.open() + try: + if not netsnmp.lib.snmp_send(sess.sess, reply): + netsnmp.lib.snmp_sess_perror( + "unable to send inform PDU", self._session.sess + ) + finally: + sess.close() + finally: + netsnmp.lib.snmp_free_pdu(reply) + + +def _pre_parse(session, transport, transport_data, transport_data_length): + """ + Called before the net-snmp library parses the PDU. + In the case where a v3 trap comes in with unkwnown credentials, + net-snmp silently discards the packet. 
This method gives zentrap a + way to log that these packets were received to help with + troubleshooting. + """ + if log.isEnabledFor(logging.DEBUG): + ipv6_socket_address = ctypes.cast( + transport_data, ctypes.POINTER(sockaddr_in6) + ).contents + if ipv6_socket_address.family == socket.AF_INET6: + log.debug( + "pre_parse: IPv6 %s", + socket.inet_ntop(socket.AF_INET6, ipv6_socket_address.addr), + ) + elif ipv6_socket_address.family == socket.AF_INET: + ipv4_socket_address = ctypes.cast( + transport_data, ctypes.POINTER(sockaddr_in) + ).contents + log.debug( + "pre_parse: IPv4 %s", + socket.inet_ntop(socket.AF_INET, ipv4_socket_address.addr), + ) + else: + log.debug( + "pre_parse: unexpected address family: %s", + ipv6_socket_address.family, + ) + return 1 + + +def _getPacketIp(addr): + """ + For IPv4, convert a pointer to 4 bytes to a dotted-ip-address + For IPv6, convert a pointer to 16 bytes to a canonical IPv6 address. + """ + + def _gen_byte_pairs(): + for left, right in zip(addr[::2], addr[1::2]): + yield "%.2x%.2x" % (left, right) + + v4_mapped_prefix = [0x00] * 10 + [0xFF] * 2 + if addr[: len(v4_mapped_prefix)] == v4_mapped_prefix: + ip_address = ".".join(str(i) for i in addr[-4:]) + else: + try: + basic_v6_address = ":".join(_gen_byte_pairs()) + ip_address = str(IPAddress(basic_v6_address, 6)) + except ValueError: + log.warn("The IPv6 address is incorrect: %s", addr[:]) + ip_address = "::" + return ip_address + + +def _get_addr_and_port_from_packet(pdu): + ipv6_socket_address = ctypes.cast( + pdu.transport_data, ctypes.POINTER(sockaddr_in6) + ).contents + if ipv6_socket_address.family == socket.AF_INET6: + if pdu.transport_data_length < ctypes.sizeof(sockaddr_in6): + log.error( + "PDU transport data is too small for sockaddr_in6 struct." + ) + return (None, None) + ip_address = _getPacketIp(ipv6_socket_address.addr) + elif ipv6_socket_address.family == socket.AF_INET: + if pdu.transport_data_length < ctypes.sizeof(sockaddr_in): + log.error( + "PDU transport data is too small for sockaddr_in struct." + ) + return (None, None) + ipv4_socket_address = ctypes.cast( + pdu.transport_data, ctypes.POINTER(sockaddr_in) + ).contents + ip_address = ".".join(str(i) for i in ipv4_socket_address.addr) + else: + log.error( + "received a packet with unrecognized network family: %s", + ipv6_socket_address.family, + ) + return (None, None) + + port = socket.ntohs(ipv6_socket_address.port) + return (ip_address, port) diff --git a/Products/ZenEvents/zentrap/replay.py b/Products/ZenEvents/zentrap/replay.py new file mode 100644 index 0000000000..fbdaf8c806 --- /dev/null +++ b/Products/ZenEvents/zentrap/replay.py @@ -0,0 +1,99 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import collections +import cPickle +import glob +import logging + +from optparse import OptionValueError + +log = logging.getLogger("zen.zentrap.replay") + + +class PacketReplay(collections.Iterable): + """ + Returns an iterator that produces previously capture packets. + + The client code can 'replay' these packets in the same order that they + were captured. 
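+
+    For example (illustrative): PacketReplay(["/tmp/zentrap-capture-"])
+    yields, in sorted filename order, each packet unpickled from the files
+    matching the glob "/tmp/zentrap-capture-*".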
+    """
+
+    @staticmethod
+    def add_options(parser):
+        parser.add_option(
+            "--replayFilePrefix",
+            dest="replayFilePrefix",
+            action="callback",
+            callback=_validate_replayfileprefix,
+            nargs=1,
+            type="str",
+            default=[],
+            help="Filename prefix containing captured packet data. "
+            "Can specify more than once.",
+        )
+
+    @classmethod
+    def from_options(cls, options):
+        """
+        Returns a PacketReplay object if the `replayFilePrefix` attribute
+        of the `options` parameter is not empty.
+        """
+        if options.replayFilePrefix:
+            return cls(options.replayFilePrefix)
+
+    def __init__(self, fileprefixes):
+        self._fileprefixes = fileprefixes
+
+    @property
+    def prefixes(self):
+        return self._fileprefixes
+
+    @property
+    def filenames(self):
+        return self._filenames()
+
+    def __iter__(self):
+        """
+        Replay all captured packets using the files specified in
+        the --replayFilePrefix option and then exit.
+        """
+        return self._packet_generator()
+
+    def _packet_generator(self):
+        # Note what you are about to see below is a direct result of optparse
+        for name in self._filenames():
+            log.debug("loading packet data from '%s'", name)
+
+            try:
+                with open(name, "rb") as fp:
+                    packet = cPickle.load(fp)
+            except (IOError, EOFError):
+                log.exception("failed to load packet data from %s", name)
+                continue
+
+            yield packet
+
+    def _filenames(self):
+        return sorted(
+            name
+            for prefix in self._fileprefixes
+            for name in glob.glob(prefix + "*")
+        )
+
+
+def _validate_replayfileprefix(option, optstr, value, parser):
+    if getattr(parser.values, "captureFilePrefix", None):
+        raise OptionValueError(
+            "can't use --replayFilePrefix with --captureFilePrefix"
+        )
+    prefixes = getattr(parser.values, option.dest)
+    prefixes.append(value)
diff --git a/Products/ZenEvents/zentrap/tests/__init__.py b/Products/ZenEvents/zentrap/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/Products/ZenEvents/zentrap/tests/test_decode.py b/Products/ZenEvents/zentrap/tests/test_decode.py
new file mode 100644
index 0000000000..752baff43a
--- /dev/null
+++ b/Products/ZenEvents/zentrap/tests/test_decode.py
@@ -0,0 +1,123 @@
+import base64
+import logging
+
+from struct import pack
+from unittest import TestCase
+
+from ..decode import decode_snmp_value
+
+
+class DecodersUnitTest(TestCase):
+    def setUp(t):
+        logging.disable(logging.CRITICAL)
+
+    def tearDown(t):
+        logging.disable(logging.NOTSET)
+
+    def test_decode_oid(t):
+        value = (1, 2, 3, 4)
+        t.assertEqual(decode_snmp_value(value), "1.2.3.4")
+
+    def test_decode_utf8(t):
+        value = "valid utf8 string \xc3\xa9"
+        t.assertEqual(
+            decode_snmp_value(value), value.decode("utf8")
+        )
+
+    def test_decode_datetime(t):
+        value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 50, 8, "+", 6, 5)
+        t.assertEqual(
+            decode_snmp_value(value), "2017-12-20T11:50:50.800+06:05"
+        )
+
+    def test_decode_bad_timezone(t):
+        value = pack(">HBBBBBBBBB", 2017, 12, 20, 11, 50, 50, 8, 0, 0, 0)
+        dttm = decode_snmp_value(value)
+        t.assertEqual(dttm[:23], "2017-12-20T11:50:50.800")
+        t.assertRegexpMatches(dttm[23:], "^[+-][01][0-9]:[0-5][0-9]$")
+
+    def test_decode_invalid_timezone(t):
+        value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 50, 8, "=", 6, 5)
+        t.assertEqual(
+            decode_snmp_value(value), "BASE64:" + base64.b64encode(value)
+        )
+
+    def test_decode_incomplete_datetime(t):
+        value = pack(">HBBBBBB", 2017, 12, 20, 11, 50, 50, 8)
+        t.assertEqual(
+            decode_snmp_value(value), "BASE64:" + base64.b64encode(value)
+        )
+
+    def test_decode_bad_month_high(t):
+        value = pack(">HBBBBBBsBB", 
2017, 13, 20, 11, 50, 50, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_bad_month_low(t): + value = pack(">HBBBBBBsBB", 2017, 0, 20, 11, 50, 50, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_bad_day_high(t): + value = pack(">HBBBBBBsBB", 2017, 12, 32, 11, 50, 50, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_bad_day_low(t): + value = pack(">HBBBBBBsBB", 2017, 12, 0, 11, 50, 50, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_bad_hour(t): + value = pack(">HBBBBBBsBB", 2017, 12, 20, 24, 50, 50, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_bad_minute(t): + value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 60, 50, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_bad_second(t): + value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 61, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_leap_second(t): + value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 60, 8, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "2017-12-20T11:50:60.800+06:05" + ) + + def test_decode_bad_decisecond(t): + value = pack(">HBBBBBBsBB", 2017, 12, 20, 11, 50, 50, 10, "+", 6, 5) + t.assertEqual( + decode_snmp_value(value), "BASE64:" + base64.b64encode(value) + ) + + def test_decode_value_ipv4(t): + value = "\xcc\x0b\xc8\x01" + t.assertEqual(decode_snmp_value(value), "204.11.200.1") + + def test_decode_value_ipv6(t): + value = "Z\xef\x00+\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08" + t.assertEqual(decode_snmp_value(value), "5aef:2b::8") + + def test_decode_int_values(t): + value = int(555) + t.assertEqual(decode_snmp_value(value), int(555)) + + def test_encode_invalid_chars(t): + value = "\xde\xad\xbe\xef\xfe\xed\xfa\xce" + t.assertEqual(decode_snmp_value(value), "BASE64:3q2+7/7t+s4=") + + def test_decode_unexpected_object_type(t): + value = object() + t.assertEqual(decode_snmp_value(value), None) diff --git a/Products/ZenEvents/zentrap/tests/test_filterspec.py b/Products/ZenEvents/zentrap/tests/test_filterspec.py new file mode 100644 index 0000000000..48d8b0356f --- /dev/null +++ b/Products/ZenEvents/zentrap/tests/test_filterspec.py @@ -0,0 +1,739 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import print_function + +# runtests -v -t unit Products.ZenEvents -m testTrapFilter + +import logging + +from unittest import TestCase + +from ..filterspec import ( + BaseFilterDefinition, + FilterSpecification, + GenericTrapFilterDefinition, + OIDBasedFilterDefinition, +) + + +class OIDBasedFilterDefinitionTest(TestCase): + def testEQByOID(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + self.assertEqual(base1, base2) + + def testEQByOIDFails(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = OIDBasedFilterDefinition(0, "include", "5.4.3.2.1") + self.assertNotEqual(base1, base2) + + def testEQByOIDIgnoresAction(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = OIDBasedFilterDefinition(0, "exclude", "1.2.3.4.5") + self.assertEqual(base1, base2) + + def testEQByOIDFailsForDifferentClass(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = BaseFilterDefinition(0, "include") + self.assertNotEqual(base1, base2) + + def testHash(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + self.assertEqual(hash(base1), hash(base2)) + + def testHashFails(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = OIDBasedFilterDefinition(0, "include", "5.4.3.2.1") + self.assertNotEqual(hash(base1), hash(base2)) + + def testHashIgnoresAction(self): + base1 = OIDBasedFilterDefinition(0, "include", "1.2.3.4.5") + base2 = OIDBasedFilterDefinition(0, "exclude", "1.2.3.4.5") + self.assertEqual(hash(base1), hash(base2)) + + +class GenericTrapFilterDefinitionTest(TestCase): + def testEQByOID(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = GenericTrapFilterDefinition(0, "include", "1") + self.assertEqual(base1, base2) + + def testEQByOIDFails(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = GenericTrapFilterDefinition(0, "include", "5") + self.assertNotEqual(base1, base2) + + def testEQByOIDIgnoresAction(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = GenericTrapFilterDefinition(0, "exclude", "1") + self.assertEqual(base1, base2) + + def testEQByOIDFailsForDifferentClass(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = BaseFilterDefinition(0, "include") + self.assertNotEqual(base1, base2) + + def testHash(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = GenericTrapFilterDefinition(0, "include", "1") + self.assertEqual(hash(base1), hash(base2)) + + def testHashFails(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = GenericTrapFilterDefinition(0, "include", "2") + self.assertNotEqual(hash(base1), hash(base2)) + + def testHashIgnoresAction(self): + base1 = GenericTrapFilterDefinition(0, "include", "1") + base2 = GenericTrapFilterDefinition(0, "exclude", "1") + self.assertEqual(hash(base1), hash(base2)) + + +class FilterSpecificationTest(TestCase): + def setUp(t): + logging.disable(logging.CRITICAL) + t.spec = FilterSpecification("localhost") + + def tearDown(t): + logging.disable(logging.NOTSET) + + def testValidateOIDForGlob(t): + results = t.spec._validateOID("*") + t.assertEqual(results, None) + + results = t.spec._validateOID("1.2.*") + t.assertEqual(results, None) + + def 
testValidateOIDFailsForEmptyString(t): + results = t.spec._validateOID("") + t.assertEqual(results, "Empty OID is invalid") + + def testValidateOIDFailsForSimpleNumber(t): + results = t.spec._validateOID("123") + t.assertEqual(results, "At least one '.' required") + + def testValidateOIDFailsForInvalidChars(t): + results = t.spec._validateOID("1.2.3-5.*") + t.assertEqual( + results, + "Invalid character found; only digits, '.' and '*' allowed", + ) + + def testValidateOIDFailsForDoubleDots(t): + results = t.spec._validateOID("1.2..3") + t.assertEqual(results, "Consecutive '.'s not allowed") + + def testValidateOIDFailsForInvalidGlobbing(t): + results = t.spec._validateOID("1.2.3.*.5.*") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("1.*.5") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("1.5*") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("*.") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("*.1") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("*.*") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("5*") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID("*5") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + results = t.spec._validateOID(".*") + t.assertEqual( + results, + "When using '*', only a single '*' at the end of OID is allowed", + ) + + def testParseFilterDefinitionForEmptyLine(t): + results = t.spec._parseFilterDefinition("", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, "Incomplete filter definition") + + def testParseFilterDefinitionForIncompleteLine(t): + results = t.spec._parseFilterDefinition("a b", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, "Incomplete filter definition") + + def testParseFilterDefinitionForInvalidAction(t): + results = t.spec._parseFilterDefinition("invalid V1 ignored", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual( + results, + "Invalid action 'invalid'; the only valid actions are " + "'include' or 'exclude'", + ) + + def testParseFilterDefinitionForInvalidVersion(t): + results = t.spec._parseFilterDefinition("include V4 ignored", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual( + results, + "Invalid SNMP version 'V4'; the only valid versions are " + "'v1', 'v2', or 'v3'", + ) + + def testParseFilterDefinitionForInvalidV1Definition(t): + results = t.spec._parseFilterDefinition("include V1 .", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + def testParseFilterDefinitionForCaseInsensitiveDefinition(t): + results = t.spec._parseFilterDefinition("InClude v1 3", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, None) + + def testParseFilterDefinitionForValidV1Definition(t): + results = 
t.spec._parseFilterDefinition("include V1 3", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, None) + + def testParseFilterDefinitionForInvalidV2Definition(t): + results = t.spec._parseFilterDefinition("include V2 .", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + def testParseFilterDefinitionForValidV2Definition(t): + results = t.spec._parseFilterDefinition("include V2 .1.3.6.1.4.*", 99) + # t.assertEqual(t.spec._eventService.sendEvent.called, False) + t.assertEqual(results, None) + + def testParseFilterDefinitionForInvalidV3Definition(t): + results = t.spec._parseFilterDefinition("include V3 .", 99) + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + def testParseFilterDefinitionForValidV3Definition(t): + results = t.spec._parseFilterDefinition("include V3 .1.3.6.1.4.*", 99) + t.assertEqual(results, None) + + def testParseV1FilterDefinitionForGenericTrap(t): + results = t.spec._parseV1FilterDefinition(99, "include", ["0"], ".*") + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 1) + t.assertEqual(len(t.spec._v1Filters), 0) + t.assertEqual(len(t.spec._v2Filters), 0) + + genericTrapDefinition = t.spec._v1Traps["0"] + t.assertIsNotNone(genericTrapDefinition) + t.assertEqual(genericTrapDefinition.lineNumber, 99) + t.assertEqual(genericTrapDefinition.action, "include") + t.assertEqual(genericTrapDefinition.genericTrap, "0") + + # Now add another to make sure we can parse more than one + results = t.spec._parseV1FilterDefinition(100, "exclude", ["5"], ".*") + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 2) + t.assertEqual(len(t.spec._v1Filters), 0) + t.assertEqual(len(t.spec._v2Filters), 0) + + genericTrapDefinition = t.spec._v1Traps["5"] + t.assertIsNotNone(genericTrapDefinition) + t.assertEqual(genericTrapDefinition.lineNumber, 100) + t.assertEqual(genericTrapDefinition.action, "exclude") + t.assertEqual(genericTrapDefinition.genericTrap, "5") + + def testParseV1FilterDefinitionEnterpriseSpecificTrap(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", ["1.2.3.*"], ".*" + ) + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 1) + t.assertEqual(len(t.spec._v2Filters), 0) + + oidLevels = 4 + mapByLevel = t.spec._v1Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 1) + + filterDef = mapByLevel["1.2.3.*"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 99) + t.assertEqual(filterDef.action, "include") + t.assertEqual(filterDef.oid, "1.2.3.*") + t.assertEqual(filterDef.specificTrap, None) + + # Add another 4-level OID + results = t.spec._parseV1FilterDefinition( + 100, "exclude", ["1.2.3.4", "25"], ".*" + ) + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 1) + t.assertEqual(len(t.spec._v2Filters), 0) + + mapByLevel = t.spec._v1Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 2) + + filterDef = mapByLevel["1.2.3.4-25"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 100) + t.assertEqual(filterDef.action, "exclude") + t.assertEqual(filterDef.oid, "1.2.3.4") + t.assertEqual(filterDef.specificTrap, "25") + + # Add a different specific trap for the same OID + results = t.spec._parseV1FilterDefinition( + 101, "exclude", ["1.2.3.4", "99"], ".*" + ) + 
t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 1) + t.assertEqual(len(t.spec._v2Filters), 0) + + mapByLevel = t.spec._v1Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 3) + + filterDef = mapByLevel["1.2.3.4-99"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 101) + t.assertEqual(filterDef.action, "exclude") + t.assertEqual(filterDef.oid, "1.2.3.4") + t.assertEqual(filterDef.specificTrap, "99") + + # Add another single-level OID + results = t.spec._parseV1FilterDefinition(101, "exclude", ["*"], ".*") + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 2) + t.assertEqual(len(t.spec._v2Filters), 0) + + oidLevels = 1 + mapByLevel = t.spec._v1Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 1) + + filterDef = mapByLevel["*"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 101) + t.assertEqual(filterDef.action, "exclude") + t.assertEqual(filterDef.oid, "*") + t.assertEqual(filterDef.specificTrap, None) + + def testParseV1FilterDefinitionFailsForTooManyArgs(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", ["0", "1", "2"], ".*" + ) + t.assertEqual( + results, + "Too many fields found; at most 4 fields allowed for V1 filters", + ) + + def testParseV1FilterDefinitionFailsForEmptyOID(t): + results = t.spec._parseV1FilterDefinition(99, "include", [], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + results = t.spec._parseV1FilterDefinition(99, "include", [""], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + results = t.spec._parseV1FilterDefinition(99, "include", ["."], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + results = t.spec._parseV1FilterDefinition(99, "include", ["..."], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + def testParseV1FilterDefinitionFailsForInvalidOID(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", ["invalidOID"], ".*" + ) + t.assertEqual( + results, + "'invalidOID' is not a valid OID: Invalid character found; " + "only digits, '.' 
and '*' allowed", + ) + + def testParseV1FilterDefinitionFailsForInvalidTrap(t): + results = t.spec._parseV1FilterDefinition(99, "include", ["a"], ".*") + t.assertEqual(results, "Invalid generic trap 'a'; must be one of 0-5") + + results = t.spec._parseV1FilterDefinition(99, "include", ["6"], ".*") + t.assertEqual(results, "Invalid generic trap '6'; must be one of 0-5") + + def testParseV1FilterDefinitionFailsForConflictingTrap(t): + results = t.spec._parseV1FilterDefinition(99, "include", ["1"], ".*") + t.assertEqual(results, None) + + results = t.spec._parseV1FilterDefinition(100, "include", ["1"], ".*") + t.assertEqual( + results, + "Generic trap '1' conflicts with previous definition at line 99", + ) + + # Verify we find a conflict for generic traps where the action differs + results = t.spec._parseV1FilterDefinition(100, "exclude", ["1"], ".*") + t.assertEqual( + results, + "Generic trap '1' conflicts with previous definition at line 99", + ) + + def testParseV1FilterDefinitionFailsForConflictingOID(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", [".1.3.6.1.4.5", "2"], ".*" + ) + t.assertEqual(results, None) + + results = t.spec._parseV1FilterDefinition( + 100, "include", [".1.3.6.1.4.5", "2"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.5' conflicts with previous definition at line 99", + ) + + # Verify we find a conflict for OIDs where the action differs + results = t.spec._parseV1FilterDefinition( + 100, "exclude", [".1.3.6.1.4.5", "2"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.5' conflicts with previous definition at line 99", + ) + + results = t.spec._parseV1FilterDefinition( + 101, "include", [".1.3.6.1.4.*"], ".*" + ) + t.assertEqual(results, None) + + # Verify we find a conflict for glob-based OIDs + results = t.spec._parseV1FilterDefinition( + 102, "include", [".1.3.6.1.4.*"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.*' conflicts with previous definition at line 101", + ) + + # Verify we find a conflict for glob-based OIDs where the + # action differs. 
+ results = t.spec._parseV1FilterDefinition( + 102, "exclude", [".1.3.6.1.4.*"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.*' conflicts with previous definition at line 101", + ) + + def testParseV1FilterDefinitionFailsForEnterpriseSpecificGlob(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", [".1.3.6.1.4.5.*", "23"], ".*" + ) + t.assertEqual(results, "Specific trap not allowed with globbed OID") + + def testParseV1FilterDefinitionFailsForInvalidEnterpriseSpecificTrap(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", [".1.3.6.1.4.5", "abc"], ".*" + ) + t.assertEqual( + results, + "Specific trap 'abc' invalid; must be non-negative integer", + ) + + results = t.spec._parseV1FilterDefinition( + 99, "include", [".1.3.6.1.4.5", "-1"], ".*" + ) + t.assertEqual( + results, "Specific trap '-1' invalid; must be non-negative integer" + ) + + def testParseV1FilterDefinitionForSpecificOid(t): + results = t.spec._parseV1FilterDefinition( + 99, "include", [".1.3.6.1.4.5"], ".*" + ) + t.assertEqual(results, None) + + def testParseV2FilterDefinition(t): + results = t.spec._parseV2FilterDefinition( + 99, "include", ["1.2.3.*"], ".*" + ) + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 0) + t.assertEqual(len(t.spec._v2Filters), 1) + + oidLevels = 4 + mapByLevel = t.spec._v2Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 1) + + filterDef = mapByLevel["1.2.3.*"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 99) + t.assertEqual(filterDef.action, "include") + t.assertEqual(filterDef.oid, "1.2.3.*") + + # Add another 4-level OID + results = t.spec._parseV2FilterDefinition( + 100, "exclude", ["1.2.3.4"], ".*" + ) + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 0) + t.assertEqual(len(t.spec._v2Filters), 1) + + mapByLevel = t.spec._v2Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 2) + + filterDef = mapByLevel["1.2.3.4"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 100) + t.assertEqual(filterDef.action, "exclude") + t.assertEqual(filterDef.oid, "1.2.3.4") + + # Add another single-level OID + results = t.spec._parseV2FilterDefinition(101, "exclude", ["*"], ".*") + t.assertEqual(results, None) + t.assertEqual(len(t.spec._v1Traps), 0) + t.assertEqual(len(t.spec._v1Filters), 0) + t.assertEqual(len(t.spec._v2Filters), 2) + + oidLevels = 1 + mapByLevel = t.spec._v2Filters[oidLevels] + t.assertIsNotNone(mapByLevel) + t.assertEqual(len(mapByLevel), 1) + + filterDef = mapByLevel["*"] + t.assertIsNotNone(filterDef) + t.assertEqual(filterDef.lineNumber, 101) + t.assertEqual(filterDef.action, "exclude") + t.assertEqual(filterDef.oid, "*") + + def testParseV2FilterDefinitionFailsForTooManyArgs(t): + results = t.spec._parseV2FilterDefinition( + 99, "include", ["0", "1"], ".*" + ) + t.assertEqual( + results, + "Too many fields found; at most 3 fields allowed for V2 filters", + ) + + def testParseV2FilterDefinitionFailsForEmptyOID(t): + results = t.spec._parseV2FilterDefinition(99, "include", [], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + results = t.spec._parseV2FilterDefinition(99, "include", [""], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + results = t.spec._parseV2FilterDefinition(99, "include", ["."], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty 
OID is invalid") + + results = t.spec._parseV2FilterDefinition(99, "include", ["..."], ".*") + t.assertEqual(results, "'' is not a valid OID: Empty OID is invalid") + + def testParseV2FilterDefinitionFailsForInvalidOID(t): + results = t.spec._parseV2FilterDefinition( + 99, "include", ["invalidOID"], ".*" + ) + t.assertEqual( + results, + "'invalidOID' is not a valid OID: Invalid character found; " + "only digits, '.' and '*' allowed", + ) + + def testParseV2FilterDefinitionFailsForConflictingOID(t): + results = t.spec._parseV2FilterDefinition( + 99, "include", [".1.3.6.1.4.5"], ".*" + ) + t.assertEqual(results, None) + + results = t.spec._parseV2FilterDefinition( + 100, "include", [".1.3.6.1.4.5"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.5' conflicts with previous definition at line 99", + ) + + # Verify we find a conflict for OIDs where the action differs + results = t.spec._parseV2FilterDefinition( + 100, "exclude", [".1.3.6.1.4.5"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.5' conflicts with previous definition at line 99", + ) + + results = t.spec._parseV2FilterDefinition( + 101, "include", [".1.3.6.1.4.*"], ".*" + ) + t.assertEqual(results, None) + + # Verify we find a conflict for glob-based OIDs + results = t.spec._parseV2FilterDefinition( + 102, "include", [".1.3.6.1.4.*"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.*' conflicts with previous definition at line 101", + ) + + # Verify we find a conflict for glob-based OIDs where the + # action differs + results = t.spec._parseV2FilterDefinition( + 102, "exclude", [".1.3.6.1.4.*"], ".*" + ) + t.assertEqual( + results, + "OID '1.3.6.1.4.*' conflicts with previous definition at line 101", + ) + + +class TestPersistentFilterSpecProperties(TestCase): + """ + Test that the `v1traps`, `v1filters`, and `v2filters` properties are + updated after a call to `update_from_string` with a different config. 
+ """ + + _rules_v1 = "\n".join( + ( + "include v1 0", + "include v1 1", + "include v1 2", + "include v1 3", + "include v1 4", + "include v1 5", + "include v1 *", + "include v2 *", + ) + ) + + _rules_v2 = "\n".join( + ( + "include v1 0", + "include v1 1", + "exclude v1 2", + "include v1 3", + "include v1 4", + "include v1 5", + "include v1 *", + "include v2 *", + "exclude v1 1.3.6.1.4.1.9.9.41.2.0.*", + "exclude v2 1.3.6.1.4.1.9.9.43.1.3.*", + ) + ) + + def setUp(t): + t.spec = FilterSpecification("localhost") + t.spec.update_from_string(t._rules_v1) + + def test_v1trap_property(t): + reference = t.spec.v1traps + original = dict(reference) + t.spec.update_from_string(t._rules_v2) + + t.assertEqual(len(original), len(t.spec.v1traps)) + t.assertEqual(len(reference), len(t.spec.v1traps)) + t.assertFalse( + all( + (kr == kl) and _eq_v1generic(vr, vl) + for (kr, vr), (kl, vl) in zip( + sorted(original.items()), sorted(t.spec.v1traps.items()) + ) + ) + ) + t.assertTrue( + all( + (kr == kl) and _eq_v1generic(vr, vl) + for (kr, vr), (kl, vl) in zip( + sorted(reference.items()), sorted(t.spec.v1traps.items()) + ) + ) + ) + + def test_v1filters_property(t): + reference = t.spec.v1filters + original = dict(reference) + t.spec.update_from_string(t._rules_v2) + + t.assertNotEqual(len(original), len(t.spec.v1filters)) + t.assertEqual(len(reference), len(t.spec.v1filters)) + t.assertTrue( + all( + (kr == kl) and _eq_v1enterprise(vr, vl) + for (kr, vr), (kl, vl) in zip( + sorted(reference.items()), sorted(t.spec.v1filters.items()) + ) + ) + ) + + def test_v2filters_property(t): + reference = t.spec.v2filters + original = dict(reference) + t.spec.update_from_string(t._rules_v2) + + t.assertNotEqual(len(original), len(t.spec.v2filters)) + t.assertEqual(len(reference), len(t.spec.v2filters)) + t.assertTrue( + all( + (kr == kl) and _eq_v2(vr, vl) + for (kr, vr), (kl, vl) in zip( + sorted(reference.items()), sorted(t.spec.v2filters.items()) + ) + ) + ) + + +def _eq_v1generic(lv, rv): + return (lv.genericTrap == rv.genericTrap) and (lv.action == rv.action) + + +def _eq_v1enterprise(lv, rv): + return all( + (k1 == k2) + and (v1.action == v2.action) + and (v1.oid == v2.oid) + and (v1.specificTrap == v2.specificTrap) + for (k1, v1), (k2, v2) in zip(sorted(lv.items()), sorted(rv.items())) + ) + + +def _eq_v2(lv, rv): + return all( + (k1 == k2) + and (v1.action == v2.action) + and (v1.oid == v2.oid) + for (k1, v1), (k2, v2) in zip(sorted(lv.items()), sorted(rv.items())) + ) diff --git a/Products/ZenEvents/zentrap/tests/test_handlers.py b/Products/ZenEvents/zentrap/tests/test_handlers.py new file mode 100644 index 0000000000..19faf6a9c5 --- /dev/null +++ b/Products/ZenEvents/zentrap/tests/test_handlers.py @@ -0,0 +1,898 @@ +import logging + +from unittest import TestCase + +import six + +from mock import Mock + +from ..handlers import ReplayTrapHandler +from ..net import FakePacket, SNMPv1, SNMPv2 +from ..oidmap import OidMap +from ..processors import ( + LegacyVarbindProcessor, + DirectVarbindProcessor, + MixedVarbindProcessor, +) + + +class _Common(TestCase): + def setUp(t): + logging.disable(logging.CRITICAL) + t.app = Mock() + t.oidmap = OidMap(t.app) + t.eventservice = Mock() + t.stats = Mock() + t.monitor = "localhost" + + def tearDown(t): + logging.disable(logging.NOTSET) + + +class _SnmpV1Base(_Common): + def makeInputs(t, trapType=6, oidMap=None, variables=(), copymode=None): + oidMap = oidMap if oidMap is not None else {} + pckt = FakePacket() + pckt.version = SNMPv1 + pckt.host = "localhost" + pckt.port = 
162 + pckt.variables = variables + pckt.community = "" + pckt.enterprise_length = 0 + + # extra fields for SNMPv1 packets + pckt.agent_addr = [192, 168, 24, 4] + pckt.trap_type = trapType + pckt.specific_type = 5 + pckt.enterprise = "1.2.3.4" + pckt.enterprise_length = len(pckt.enterprise) + pckt.community = "community" + + t.oidmap._oidmap = oidMap + handler = ReplayTrapHandler( + t.oidmap, copymode, t.monitor, t.eventservice + ) + handler.stats = t.stats + return pckt, handler + + +class TestDecodeSnmpV1(_SnmpV1Base): + def test_NoAgentAddr(t): + pckt, handler = t.makeInputs() + del pckt.agent_addr + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(result["device"], "localhost") + + def test_FieldsNoMappingUsed(t): + pckt, handler = t.makeInputs() + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + + t.assertEqual(result["device"], "192.168.24.4") + t.assertEqual(result["snmpVersion"], "1") + t.assertEqual(result["snmpV1Enterprise"], "1.2.3.4") + t.assertEqual(result["snmpV1GenericTrapType"], 6) + t.assertEqual(result["snmpV1SpecificTrap"], 5) + t.assertEqual(eventType, "1.2.3.4.5") + t.assertEqual(result["oid"], "1.2.3.4.5") + + def test_EnterpriseOIDWithExtraZero(t): + pckt, handler = t.makeInputs(oidMap={"1.2.3.4.0.5": "testing"}) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "testing") + t.assertEqual(result["oid"], "1.2.3.4.0.5") + + def test_TrapType0(t): + pckt, handler = t.makeInputs(trapType=0) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "coldStart") + t.assertEqual(result["snmpV1GenericTrapType"], 0) + + def test_TrapType1(t): + pckt, handler = t.makeInputs(trapType=1) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "warmStart") + t.assertEqual(result["snmpV1GenericTrapType"], 1) + + def test_TrapType2(t): + pckt, handler = t.makeInputs(trapType=2) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "snmp_linkDown") + t.assertEqual(result["snmpV1GenericTrapType"], 2) + + def test_TrapType3(t): + pckt, handler = t.makeInputs(trapType=3) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "snmp_linkUp") + t.assertEqual(result["snmpV1GenericTrapType"], 3) + + def test_TrapType4(t): + pckt, handler = t.makeInputs(trapType=4) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "authenticationFailure") + t.assertEqual(result["snmpV1GenericTrapType"], 4) + + def test_TrapType5(t): + pckt, handler = t.makeInputs(trapType=5) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "egpNeighorLoss") + t.assertEqual(result["snmpV1GenericTrapType"], 5) + + def test_TrapType6(t): + pckt, handler = t.makeInputs(trapType=6) + eventType, result = handler.decodeSnmpv1(("localhost", 162), pckt) + t.assertEqual(eventType, "1.2.3.4.5") + t.assertEqual(result["snmpV1GenericTrapType"], 6) + + +class _SnmpV2Base(_Common): + baseOidMap = { + # Std var binds in SnmpV2 traps/notifications + "1.3.6.1.2.1.1.3": "sysUpTime", + "1.3.6.1.6.3.1.1.4.1": "snmpTrapOID", + # SnmpV2 Traps (snmpTrapOID.0 values) + "1.3.6.1.6.3.1.1.5.1": "coldStart", + "1.3.6.1.6.3.1.1.5.2": "warmStart", + "1.3.6.1.6.3.1.1.5.3": "linkDown", + "1.3.6.1.6.3.1.1.5.4": "linkUp", + "1.3.6.1.6.3.1.1.5.5": "authenticationFailure", + "1.3.6.1.6.3.1.1.5.6": 
"egpNeighborLoss", + } + + def makePacket(t, trapOID, variables=()): + pckt = FakePacket() + pckt.version = SNMPv2 + pckt.host = "localhost" + pckt.port = 162 + + if isinstance(trapOID, six.string_types): + trapOID = tuple(map(int, trapOID.split("."))) + pckt.variables = [ + ((1, 3, 6, 1, 2, 1, 1, 3, 0), 5342), + ((1, 3, 6, 1, 6, 3, 1, 1, 4, 1, 0), trapOID), + ] + pckt.variables.extend(variables) + pckt.community = "public" + pckt.enterprise_length = 0 + return pckt + + def makeHandler(t, extraOidMap=None, copymode=None): + oidmap = t.baseOidMap.copy() + if extraOidMap: + oidmap.update(extraOidMap) + t.oidmap._oidmap = oidmap + handler = ReplayTrapHandler( + t.oidmap, copymode, t.monitor, t.eventservice + ) + handler.stats = t.stats + return handler + + def makeInputs( + t, + trapOID="1.3.6.1.6.3.1.1.5.1", + variables=(), + oidMap=None, + copymode=None, + ): + oidMap = oidMap if oidMap is not None else {} + pckt = t.makePacket(trapOID=trapOID, variables=variables) + handler = t.makeHandler(extraOidMap=oidMap, copymode=copymode) + return pckt, handler + + +class TestDecodeSnmpV2OrV3(_SnmpV2Base): + def test_UnknownTrapType(t): + pckt, handler = t.makeInputs(trapOID="1.2.3") + eventType, result = handler.decodeSnmpV2OrV3(("localhost", 162), pckt) + t.assertIn("snmpVersion", result) + t.assertEqual(result["snmpVersion"], "2") + t.assertEqual(eventType, "1.2.3") + t.assertIn("snmpVersion", result) + t.assertIn("oid", result) + t.assertIn("device", result) + t.assertEqual(result["snmpVersion"], "2") + t.assertEqual(result["oid"], "1.2.3") + t.assertEqual(result["device"], "localhost") + + def test_KnownTrapType(t): + pckt, handler = t.makeInputs(trapOID="1.3.6.1.6.3.1.1.5.1") + eventType, result = handler.decodeSnmpV2OrV3(("localhost", 162), pckt) + t.assertIn("oid", result) + t.assertEqual(eventType, "coldStart") + t.assertEqual(result["oid"], "1.3.6.1.6.3.1.1.5.1") + + def test_TrapAddressOID(t): + pckt, handler = t.makeInputs( + trapOID="1.3.6.1.6.3.1.1.5.1", + variables=(((1, 3, 6, 1, 6, 3, 18, 1, 3), "192.168.51.100"),), + oidMap={"1.3.6.1.6.3.18.1.3": "snmpTrapAddress"}, + ) + eventType, result = handler.decodeSnmpV2OrV3(("localhost", 162), pckt) + t.assertIn("snmpTrapAddress", result) + t.assertEqual(result["snmpTrapAddress"], "192.168.51.100") + t.assertEqual(result["device"], "192.168.51.100") + + def test_RenamedLinkDown(t): + pckt, handler = t.makeInputs(trapOID="1.3.6.1.6.3.1.1.5.3") + eventType, result = handler.decodeSnmpV2OrV3(("localhost", 162), pckt) + t.assertIn("oid", result) + t.assertEqual(eventType, "snmp_linkDown") + t.assertEqual(result["oid"], "1.3.6.1.6.3.1.1.5.3") + + def test_RenamedLinkUp(t): + pckt, handler = t.makeInputs(trapOID="1.3.6.1.6.3.1.1.5.4") + eventType, result = handler.decodeSnmpV2OrV3(("localhost", 162), pckt) + t.assertIn("oid", result) + t.assertEqual(eventType, "snmp_linkUp") + t.assertEqual(result["oid"], "1.3.6.1.6.3.1.1.5.4") + + def test_PartialNamedVarBindNoneValue(t): + pckt = t.makePacket("1.3.6.1.6.3.1.1.5.3") + pckt.variables.append( + ((1, 2, 6, 0), None), + ) + t.oidmap._oidmap = {"1.2.6.0": "testVar"} + handler = ReplayTrapHandler( + t.oidmap, None, t.monitor, t.eventservice + ) + handler.stats = t.stats + eventType, result = handler.decodeSnmpV2OrV3(("localhost", 162), pckt) + totalVarKeys = sum(1 for k in result if k.startswith("testVar")) + t.assertEqual(totalVarKeys, 1) + t.assertIn("testVar", result) + t.assertEqual(result["testVar"], "None") + + +class _VarbindTests(object): + def case_unknown_id_single(t): + variables = (((1, 2, 
6, 7), "foo"),) + tests = ( + t.makeInputs( + variables=variables, copymode=MixedVarbindProcessor.MODE + ), + t.makeInputs( + variables=variables, copymode=DirectVarbindProcessor.MODE + ), + t.makeInputs( + variables=variables, copymode=LegacyVarbindProcessor.MODE + ), + ) + for test in tests: + result = yield test + totalVarKeys = sum(1 for k in result if k.startswith("1.2.6")) + t.assertEqual(totalVarKeys, 1) + t.assertEqual(result["1.2.6.7"], "foo") + + def case_unknown_id_repeated(t): + variables = ( + ((1, 2, 6, 7), "foo"), + ((1, 2, 6, 7), "bar"), + ((1, 2, 6, 7), "baz"), + ) + tests = ( + t.makeInputs( + variables=variables, copymode=MixedVarbindProcessor.MODE + ), + t.makeInputs( + variables=variables, copymode=DirectVarbindProcessor.MODE + ), + t.makeInputs( + variables=variables, copymode=LegacyVarbindProcessor.MODE + ), + ) + for test in tests: + result = yield test + totalVarKeys = sum(1 for k in result if k.startswith("1.2.6")) + t.assertEqual(totalVarKeys, 1) + t.assertEqual(result["1.2.6.7"], "foo,bar,baz") + + def case_unknown_ids_multiple(t): + variables = ( + ((1, 2, 6, 0), "foo"), + ((1, 2, 6, 1), "bar"), + ) + tests = ( + t.makeInputs( + variables=variables, copymode=MixedVarbindProcessor.MODE + ), + t.makeInputs( + variables=variables, copymode=DirectVarbindProcessor.MODE + ), + t.makeInputs( + variables=variables, copymode=LegacyVarbindProcessor.MODE + ), + ) + expected_results = ( + { + "1.2.6.0": "foo", + "1.2.6.1": "bar", + }, + { + "1.2.6.0": "foo", + "1.2.6.1": "bar", + }, + { + "1.2.6.0": "foo", + "1.2.6.1": "bar", + }, + ) + for test, expected in zip(tests, expected_results): + result = yield test + totalVarKeys = sum(1 for k in result if k.startswith("1.2.6")) + t.assertEqual(totalVarKeys, 2) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def case_one_id(t): + variables = (((1, 2, 6, 7), "foo"),) + oidMap = {"1.2.6.7": "testVar"} + tests = ( + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + {"testVar": "foo"}, + {"testVar": "foo"}, + {"testVar": "foo"}, + ) + for test, expected in zip(tests, expected_results): + result = yield test + totalVarKeys = sum(1 for k in result if k.startswith("testVar")) + t.assertEqual(totalVarKeys, 1) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def case_one_id_one_sub_id(t): + oidMap = {"1.2.6": "testVar"} + variables = (((1, 2, 6, 5), "foo"),) + tests = ( + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + { + "testVar": "foo", + "testVar.ifIndex": "5", + }, + { + "testVar.5": "foo", + "testVar.sequence": "5", + }, + { + "testVar": "foo", + "testVar.ifIndex": "5", + }, + ) + for test, expected in zip(tests, expected_results): + result = yield test + count = sum(1 for k in result if k.startswith("testVar")) + t.assertEqual(count, len(expected.keys())) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def 
case_one_id_multiple_sub_ids(t): + oidMap = {"1.2.6": "testVar"} + variables_one = ( + ((1, 2, 6, 0), "foo"), + ((1, 2, 6, 1), "bar"), + ((1, 2, 6, 2), "baz"), + ) + variables_two = ( + ((1, 2, 6, 3), "foo"), + ((1, 2, 6, 3), "bar"), + ) + + tests = ( + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + { + "testVar.0": "foo", + "testVar.1": "bar", + "testVar.2": "baz", + "testVar.sequence": "0,1,2", + }, + { + "testVar.3": "foo,bar", + "testVar.sequence": "3,3", + }, + { + "testVar.0": "foo", + "testVar.1": "bar", + "testVar.2": "baz", + "testVar.sequence": "0,1,2", + }, + { + "testVar.3": "foo,bar", + "testVar.sequence": "3,3", + }, + { + "testVar": "foo,bar,baz", + "testVar.ifIndex": "0,1,2", + }, + { + "testVar": "foo,bar", + "testVar.ifIndex": "3,3", + }, + ) + for test, expected in zip(tests, expected_results): + result = yield test + count = sum(1 for k in result if k.startswith("testVar")) + t.assertEqual(count, len(expected.keys())) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def case_multiple_ids(t): + oidMap = { + "1.2.6": "foo", + "1.2.7": "bar", + } + variables = ( + ((1, 2, 6), "is a foo"), + ((1, 2, 7), "lower the bar"), + ) + tests = ( + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + { + "foo": "is a foo", + "bar": "lower the bar", + }, + { + "foo": "is a foo", + "bar": "lower the bar", + }, + { + "foo": "is a foo", + "bar": "lower the bar", + }, + ) + for test, expected in zip(tests, expected_results): + result = yield test + count = sum( + 1 for k in result if k.startswith("bar") or k.startswith("foo") + ) + t.assertEqual(count, len(expected.keys())) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def case_multiple_ids_one_sub_id_each(t): + oidMap = { + "1.2.6": "foo", + "1.2.7": "bar", + } + variables_one = ( + ((1, 2, 6, 0), "is a foo"), + ((1, 2, 7, 2), "lower the bar"), + ) + variables_two = ( + ((1, 2, 6, 0, 1), "is a foo"), + ((1, 2, 7, 2, 1), "lower the bar"), + ) + tests = ( + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + 
copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + { + "foo": "is a foo", + "foo.ifIndex": "0", + "bar": "lower the bar", + "bar.ifIndex": "2", + }, + { + "foo": "is a foo", + "foo.ifIndex": "0.1", + "bar": "lower the bar", + "bar.ifIndex": "2.1", + }, + { + "foo.0": "is a foo", + "foo.sequence": "0", + "bar.2": "lower the bar", + "bar.sequence": "2", + }, + { + "foo.0.1": "is a foo", + "foo.sequence": "0.1", + "bar.2.1": "lower the bar", + "bar.sequence": "2.1", + }, + { + "foo": "is a foo", + "foo.ifIndex": "0", + "bar": "lower the bar", + "bar.ifIndex": "2", + }, + { + "foo": "is a foo", + "foo.ifIndex": "0.1", + "bar": "lower the bar", + "bar.ifIndex": "2.1", + }, + ) + for test, expected in zip(tests, expected_results): + result = yield test + count = sum( + 1 for k in result if k.startswith("bar") or k.startswith("foo") + ) + t.assertEqual(count, len(expected.keys())) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def case_multiple_ids_multiple_sub_ids(t): + oidMap = { + "1.2.6": "foo", + "1.2.7": "bar", + } + variables_one = ( + ((1, 2, 6, 0, 1), "here a foo"), + ((1, 2, 6, 1, 1), "there a foo"), + ((1, 2, 7, 2, 1), "lower the bar"), + ((1, 2, 7, 2, 2), "raise the bar"), + ) + variables_two = ( + ((1, 2, 6, 0), "here a foo"), + ((1, 2, 6, 0), "there a foo"), + ((1, 2, 7, 3), "lower the bar"), + ((1, 2, 7, 3), "raise the bar"), + ) + tests = ( + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_one, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables_two, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + { + "foo.0.1": "here a foo", + "foo.1.1": "there a foo", + "foo.sequence": "0.1,1.1", + "bar.2.1": "lower the bar", + "bar.2.2": "raise the bar", + "bar.sequence": "2.1,2.2", + }, + { + "foo.0": "here a foo,there a foo", + "foo.sequence": "0,0", + "bar.3": "lower the bar,raise the bar", + "bar.sequence": "3,3", + }, + { + "foo.0.1": "here a foo", + "foo.1.1": "there a foo", + "foo.sequence": "0.1,1.1", + "bar.2.1": "lower the bar", + "bar.2.2": "raise the bar", + "bar.sequence": "2.1,2.2", + }, + { + "foo.0": "here a foo,there a foo", + "foo.sequence": "0,0", + "bar.3": "lower the bar,raise the bar", + "bar.sequence": "3,3", + }, + { + "foo": "here a foo,there a foo", + "foo.ifIndex": "0.1,1.1", + "bar": "lower the bar,raise the bar", + "bar.ifIndex": "2.1,2.2", + }, + { + "foo": "here a foo,there a foo", + "foo.ifIndex": "0,0", + "bar": "lower the bar,raise the bar", + "bar.ifIndex": "3,3", + }, + ) + for test, expected in zip(tests, expected_results): + result = yield test + count = sum( + 1 for k in result if k.startswith("bar") or k.startswith("foo") + ) + t.assertEqual(count, len(expected.keys())) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + def case_ifentry_trap(t): + oidMap = { + "1.3.6.1.2.1.2.2.1.1": "ifIndex", + "1.3.6.1.2.1.2.2.1.7": "ifAdminStatus", + "1.3.6.1.2.1.2.2.1.8": "ifOperStatus", + "1.3.6.1.2.1.2.2.1.2": "ifDescr", + "1.3.6.1.2.1.31.1.1.1.18": 
"ifAlias", + } + variables = ( + ((1, 3, 6, 1, 2, 1, 2, 2, 1, 1, 143), 143), + ((1, 3, 6, 1, 2, 1, 2, 2, 1, 7, 143), 2), + ((1, 3, 6, 1, 2, 1, 2, 2, 1, 8, 143), 2), + ((1, 3, 6, 1, 2, 1, 2, 2, 1, 2, 143), "F23"), + ((1, 3, 6, 1, 2, 1, 31, 1, 1, 1, 18, 143), ""), + ) + tests = ( + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=MixedVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=DirectVarbindProcessor.MODE, + ), + t.makeInputs( + variables=variables, + oidMap=oidMap, + copymode=LegacyVarbindProcessor.MODE, + ), + ) + expected_results = ( + { + "ifIndex": "143", + "ifIndex.ifIndex": "143", + "ifAdminStatus": "2", + "ifAdminStatus.ifIndex": "143", + "ifOperStatus": "2", + "ifOperStatus.ifIndex": "143", + "ifDescr": "F23", + "ifDescr.ifIndex": "143", + "ifAlias": "", + "ifAlias.ifIndex": "143", + }, + { + "ifIndex.143": "143", + "ifIndex.sequence": "143", + "ifAdminStatus.143": "2", + "ifAdminStatus.sequence": "143", + "ifOperStatus.143": "2", + "ifOperStatus.sequence": "143", + "ifDescr.143": "F23", + "ifDescr.sequence": "143", + "ifAlias.143": "", + "ifAlias.sequence": "143", + }, + { + "ifIndex": "143", + "ifIndex.ifIndex": "143", + "ifAdminStatus": "2", + "ifAdminStatus.ifIndex": "143", + "ifOperStatus": "2", + "ifOperStatus.ifIndex": "143", + "ifDescr": "F23", + "ifDescr.ifIndex": "143", + "ifAlias": "", + "ifAlias.ifIndex": "143", + }, + ) + + for test, expected in zip(tests, expected_results): + result = yield test + count = sum( + 1 + for k in result + if k.startswith("ifIndex") + or k.startswith("ifAdminStatus") + or k.startswith("ifOperStatus") + or k.startswith("ifDescr") + or k.startswith("ifAlias") + ) + t.assertEqual(count, len(expected.keys())) + for key, value in expected.items(): + t.assertIn(key, result) + t.assertEqual(value, result[key]) + + +class TestSnmpV1VarbindHandling(_SnmpV1Base, _VarbindTests): + def _execute(t, cases): + try: + pckt, handler = next(cases) + while True: + eventType, result = handler.decodeSnmpv1( + ("localhost", 162), pckt + ) + pckt, handler = cases.send(result) + except StopIteration: + pass + + def test_unknown_id_single(t): + t._execute(t.case_unknown_id_single()) + + def test_unknown_id_repeated(t): + t._execute(t.case_unknown_id_repeated()) + + def test_unknown_ids_multiple(t): + t._execute(t.case_unknown_ids_multiple()) + + def test_one_id(t): + t._execute(t.case_one_id()) + + def test_one_id_one_sub_id(t): + t._execute(t.case_one_id_one_sub_id()) + + def test_one_id_multiple_sub_ids(t): + t._execute(t.case_one_id_multiple_sub_ids()) + + def test_multiple_ids(t): + t._execute(t.case_multiple_ids()) + + def test_multiple_ids_one_sub_id_each(t): + t._execute(t.case_multiple_ids_one_sub_id_each()) + + def test_multiple_ids_multiple_sub_ids(t): + t._execute(t.case_multiple_ids_multiple_sub_ids()) + + def test_ifentry_trap(t): + t._execute(t.case_ifentry_trap()) + + +class TestSnmpV2VarbindHandling(_SnmpV2Base, _VarbindTests): + def _execute(t, cases): + try: + pckt, handler = next(cases) + while True: + eventType, result = handler.decodeSnmpV2OrV3( + ("localhost", 162), pckt + ) + pckt, handler = cases.send(result) + except StopIteration: + pass + + def test_unknown_id_single(t): + t._execute(t.case_unknown_id_single()) + + def test_unknown_id_repeated(t): + t._execute(t.case_unknown_id_repeated()) + + def test_unknown_ids_multiple(t): + t._execute(t.case_unknown_ids_multiple()) + + def test_one_id(t): + t._execute(t.case_one_id()) + + def test_one_id_one_sub_id(t): + 
t._execute(t.case_one_id_one_sub_id()) + + def test_one_id_multiple_sub_ids(t): + t._execute(t.case_one_id_multiple_sub_ids()) + + def test_multiple_ids(t): + t._execute(t.case_multiple_ids()) + + def test_multiple_ids_one_sub_id_each(t): + t._execute(t.case_multiple_ids_one_sub_id_each()) + + def test_multiple_ids_multiple_sub_ids(t): + t._execute(t.case_multiple_ids_multiple_sub_ids()) + + def test_ifentry_trap(t): + t._execute(t.case_ifentry_trap()) diff --git a/Products/ZenEvents/zentrap/tests/test_oidmap.py b/Products/ZenEvents/zentrap/tests/test_oidmap.py new file mode 100644 index 0000000000..49c7f28b80 --- /dev/null +++ b/Products/ZenEvents/zentrap/tests/test_oidmap.py @@ -0,0 +1,58 @@ +import logging + +from unittest import TestCase + +from mock import Mock + +from ..oidmap import OidMap + + +class TestOidMap(TestCase): + def setUp(t): + logging.disable(logging.CRITICAL) + t.app = Mock() + t.oidmap = OidMap(t.app) + + def tearDown(t): + logging.disable(logging.NOTSET) + + def test_NoExactMatch(t): + t.assertEqual(t.oidmap.to_name(".1.2.3.4"), "1.2.3.4") + t.assertEqual(t.oidmap.to_name(".1.2.3.4", strip=True), "1.2.3.4") + + def test_HasExactMatch(t): + t.oidmap._oidmap = {"1.2.3.4": "Zenoss.Test.exactMatch"} + result = t.oidmap.to_name(".1.2.3.4") + t.assertEqual(result, "Zenoss.Test.exactMatch") + result = t.oidmap.to_name(".1.2.3.4", strip=True) + t.assertEqual(result, "Zenoss.Test.exactMatch") + + def test_NoInexactMatch(t): + t.oidmap._oidmap = {"1.2.3.4": "Zenoss.Test.exactMatch"} + result = t.oidmap.to_name(".1.5.3.4", exactMatch=False) + t.assertEqual(result, "1.5.3.4") + + def test_HasInexactMatchNotStripped(t): + t.oidmap._oidmap = { + "1.2": "Zenoss", + "1.2.3": "Zenoss.Test", + "1.2.3.2": "Zenoss.Test.inexactMatch" + } + result = t.oidmap.to_name(".1.2.3.2.5", exactMatch=False) + t.assertEqual(result, "Zenoss.Test.inexactMatch.5") + result = t.oidmap.to_name(".1.2.3.2.5.6", exactMatch=False) + t.assertEqual(result, "Zenoss.Test.inexactMatch.5.6") + + def test_HasInexactMatchStripped(t): + t.oidmap._oidmap = { + "1.2": "Zenoss", + "1.2.3": "Zenoss.Test", + "1.2.3.2": "Zenoss.Test.inexactMatch" + } + result = t.oidmap.to_name(".1.2.3.2.5", exactMatch=False, strip=True) + t.assertEqual(result, "Zenoss.Test.inexactMatch") + result = t.oidmap.to_name(".1.2.3.2.5.6", exactMatch=False, strip=True) + t.assertEqual(result, "Zenoss.Test.inexactMatch") + + def test_AcceptsTuple(t): + t.assertEqual(t.oidmap.to_name((1, 2, 3, 4)), "1.2.3.4") diff --git a/Products/ZenEvents/zentrap/tests/test_trapfilter.py b/Products/ZenEvents/zentrap/tests/test_trapfilter.py new file mode 100644 index 0000000000..d509408fe7 --- /dev/null +++ b/Products/ZenEvents/zentrap/tests/test_trapfilter.py @@ -0,0 +1,672 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2015, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +# runtests -v -t unit Products.ZenEvents -m testTrapFilter + +import logging + +from unittest import TestCase + +from mock import Mock + +from Products.ZenEvents.EventManagerBase import EventManagerBase +from Products.ZenHub.interfaces import TRANSFORM_CONTINUE, TRANSFORM_DROP + +from ..filterspec import ( + FilterSpecification, + GenericTrapFilterDefinition, + V1FilterDefinition, + V2FilterDefinition, +) +from ..trapfilter import TrapFilter + + +class TrapFilterTest(TestCase): + def setUp(t): + t.app = Mock() + t.monitor = "localhost" + t.spec = FilterSpecification(t.monitor) + t.filter = TrapFilter(t.app, t.spec) + logging.disable(logging.CRITICAL) + + def tearDown(t): + logging.disable(logging.NOTSET) + + def testDropV1EventForGenericTrapInclusion(t): + genericTrap = 0 + filterDef = GenericTrapFilterDefinition(99, "include", genericTrap) + t.filter._filterspec._v1Traps[genericTrap] = filterDef + + event = {"snmpVersion": "1", "snmpV1GenericTrapType": genericTrap} + t.assertFalse(t.filter._dropEvent(event)) + + def testDropV1EventForGenericTrapForExclusion(t): + genericTrap = 1 + filterDef = GenericTrapFilterDefinition(99, "exclude", genericTrap) + t.filter._filterspec._v1Traps[genericTrap] = filterDef + + event = {"snmpVersion": "1", "snmpV1GenericTrapType": genericTrap} + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForGenericTrapForNoMatch(t): + genericTrap = 1 + filterDef = GenericTrapFilterDefinition(99, "exclude", genericTrap) + t.filter._filterspec._v1Traps[genericTrap] = filterDef + + event = {"snmpVersion": "1", "snmpV1GenericTrapType": 2} + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForEnterpriseSimpleGlobMatch(t): + filterDef = V1FilterDefinition(99, "exclude", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[4] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1.2.3.4", + } + t.assertTrue(t.filter._dropEvent(event)) + + filterDef.action = "include" + t.assertFalse(t.filter._dropEvent(event)) + + # This test uses 1 filters for each of two OID levels where the filter + # specifies a glob match + def testDropV1EventForSimpleGlobMatches(t): + filterDef = V1FilterDefinition(99, "include", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[4] = filtersByLevel + + filterDef = V1FilterDefinition(99, "include", "1.2.3.4.5.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[6] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1.2.3.4", + } + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.99" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.99.5" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4.99" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4.5" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4.5.99" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.99.4" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.99.4.5.6" + 
t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventIncludeAll(t): + filterDef = V1FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1", + } + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1." + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3" + t.assertFalse(t.filter._dropEvent(event)) + + def testDropV1EventExcludeAll(t): + filterDef = V1FilterDefinition(99, "exclude", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1", + } + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventExcludeAllBut(t): + filterDef = V1FilterDefinition(99, "exclude", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + filterDef = V1FilterDefinition(99, "include", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[4] = filtersByLevel + + filterDef = V1FilterDefinition(99, "include", "1.4.5") + filterDef.specificTrap = "*" + filtersByLevel = {"1.4.5-*": filterDef} + t.filter._filterspec._v1Filters[3] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1", + } + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.4.5.1" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.4.5" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.4.5" + event["snmpV1SpecificTrap"] = 23 + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4.5" + t.assertFalse(t.filter._dropEvent(event)) + + def testDropV1EventIncludeAllBut(t): + filterDef = V1FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + filterDef = V1FilterDefinition(99, "exclude", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[4] = filtersByLevel + + filterDef = V1FilterDefinition(99, "exclude", "1.4.5") + filterDef.specificTrap = "*" + filtersByLevel = {"1.4.5-*": filterDef} + t.filter._filterspec._v1Filters[3] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1", + } + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.4.5.1" + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.4.5" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4.5" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForInvalidGenericTrap(t): + filterDef = V1FilterDefinition(99, "include", "*") + 
filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 9, + "snmpV1Enterprise": "1.2", + } + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForMissingGenericTrap(t): + filterDef = V1FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + event = {"snmpVersion": "1", "snmpV1Enterprise": "1.2"} + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForMissingEnterpriseOID(t): + filterDef = V1FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + } + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForEnterpriseAllExcept(t): + filterDef = V1FilterDefinition(99, "include", "1.2.3") + filterDef.specificTrap = "*" + filtersByLevel = {"1.2.3-*": filterDef} + t.filter._filterspec._v1Filters[3] = filtersByLevel + + filterDef = V1FilterDefinition(99, "exclude", "1.2.3") + filterDef.specificTrap = "59" + filtersByLevel["1.2.3-59"] = filterDef + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1.2.3", + "snmpV1SpecificTrap": 59, + } + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1SpecificTrap"] = 99 + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV1EventForEnterpriseSpecific(t): + filterDef = V1FilterDefinition(99, "include", "1.2.3") + filterDef.specificTrap = "59" + filtersByLevel = {"1.2.3-59": filterDef} + t.filter._filterspec._v1Filters[3] = filtersByLevel + + filterDef = V1FilterDefinition(99, "include", "1.2.3") + filterDef.specificTrap = "60" + filtersByLevel["1.2.3-60"] = filterDef + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1.2.3", + "snmpV1SpecificTrap": 59, + } + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1SpecificTrap"] = 60 + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpV1SpecificTrap"] = 1 + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2.3.4" + t.assertTrue(t.filter._dropEvent(event)) + + event["snmpV1Enterprise"] = "1.2" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV2EventForSimpleExactMatch(t): + filterDef = V2FilterDefinition(99, "exclude", "1.2.3.4") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[4] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1.2.3.4"} + t.assertTrue(t.filter._dropEvent(event)) + + filterDef.action = "include" + t.assertFalse(t.filter._dropEvent(event)) + + def testDropV2EventForSimpleGlobMatch(t): + filterDef = V2FilterDefinition(99, "exclude", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[4] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1.2.3.4"} + t.assertTrue(t.filter._dropEvent(event)) + + filterDef.action = "include" + t.assertFalse(t.filter._dropEvent(event)) + + # This test uses 1 filters for each of two OID levels where the filter + # specifies an exact match + def testDropV2EventForSimpleExactMatches(t): + filterDef = V2FilterDefinition(99, "include", "1.2.3") + filtersByLevel = {filterDef.oid: filterDef} + 
t.filter._filterspec._v2Filters[3] = filtersByLevel + + filterDef = V2FilterDefinition(99, "include", "1.2.3.4") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[4] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1.2.3"} + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4" + t.assertFalse(t.filter._dropEvent(event)) + + # OIDs with fewer or more levels than the existing filters + # should NOT match + event["oid"] = "1.2" + t.assertTrue(t.filter._dropEvent(event)) + event["oid"] = "1.2.3.4.9" + t.assertTrue(t.filter._dropEvent(event)) + + # OIDs that differ only in the last level should NOT match + event["oid"] = "1.2.9" + t.assertTrue(t.filter._dropEvent(event)) + event["oid"] = "1.2.3.9" + t.assertTrue(t.filter._dropEvent(event)) + + # This test uses 1 filters for each of two OID levels where the filter + # specifies a glob match + def testDropV2EventForSimpleGlobMatches(t): + filterDef = V2FilterDefinition(99, "include", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[4] = filtersByLevel + + filterDef = V2FilterDefinition(99, "include", "1.2.3.4.5.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[6] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1.2.3.4"} + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.99" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.99.5" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4.99" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4.5" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4.5.99" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.99.4" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.99.4.5.6" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV2EventIncludeAll(t): + filterDef = V2FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[1] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1"} + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1." 
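+        # An OID with a trailing dot is still matched by the
+        # include-all ("*") filter.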
+ t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3" + t.assertFalse(t.filter._dropEvent(event)) + + def testDropV2EventExcludeAll(t): + filterDef = V2FilterDefinition(99, "exclude", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[1] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1"} + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropV2EventExcludeAllBut(t): + filterDef = V2FilterDefinition(99, "exclude", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[1] = filtersByLevel + + filterDef = V2FilterDefinition(99, "include", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[4] = filtersByLevel + + filterDef = V2FilterDefinition(99, "include", "1.4.5") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[3] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1"} + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.4.5.1" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.4.5" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4.5" + t.assertFalse(t.filter._dropEvent(event)) + + def testDropV2EventIncludeAllBut(t): + filterDef = V2FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[1] = filtersByLevel + + filterDef = V2FilterDefinition(99, "exclude", "1.2.3.*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[4] = filtersByLevel + + filterDef = V2FilterDefinition(99, "exclude", "1.4.5") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[3] = filtersByLevel + + event = {"snmpVersion": "2", "oid": "1"} + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.4.5.1" + t.assertFalse(t.filter._dropEvent(event)) + + event["oid"] = "1.4.5" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4" + t.assertTrue(t.filter._dropEvent(event)) + + event["oid"] = "1.2.3.4.5" + t.assertTrue(t.filter._dropEvent(event)) + + def testDropEvent(t): + filterDef = V1FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v1Filters[1] = filtersByLevel + + filterDef = V2FilterDefinition(99, "include", "*") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[1] = filtersByLevel + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1.2.3", + "snmpV1SpecificTrap": 59, + } + t.assertFalse(t.filter._dropEvent(event)) + + event = { + "snmpVersion": "2", + "oid": "1.2.3", + } + t.assertFalse(t.filter._dropEvent(event)) + + event["snmpVersion"] = "invalidVersion" + t.assertTrue(t.filter._dropEvent(event)) + + def testTransformPassesV1Event(t): + filterDef = V1FilterDefinition(99, "include", "1.2.3") + filterDef.specificTrap = "59" + filtersByLevel = {"1.2.3-59": filterDef} + t.filter._filterspec._v1Filters[3] = filtersByLevel + t.filter._filterspec._filtersDefined = True + + event = { + "snmpVersion": "1", + 
"snmpV1GenericTrapType": 6, + "snmpV1Enterprise": filterDef.oid, + "snmpV1SpecificTrap": filterDef.specificTrap, + } + t.assertEquals(TRANSFORM_CONTINUE, t.filter.transform(event)) + + def testTransformDropsV1Event(t): + filterDef = V1FilterDefinition(99, "exclude", "1.2.3") + filterDef.specificTrap = "59" + filtersByLevel = {"1.2.3-59": filterDef} + t.filter._app.counters = { + "eventCount": 0, + "eventFilterDroppedCount": 0, + } + t.filter._filterspec._v1Filters[3] = filtersByLevel + t.filter._filterspec._filtersDefined = True + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": filterDef.oid, + "snmpV1SpecificTrap": filterDef.specificTrap, + } + t.assertEquals(TRANSFORM_DROP, t.filter.transform(event)) + + def testTransformPassesV2Event(t): + filterDef = V2FilterDefinition(99, "include", "1.2.3") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[3] = filtersByLevel + t.filter._filterspec._filtersDefined = True + + event = { + "snmpVersion": "2", + "oid": filterDef.oid, + } + t.assertEquals(TRANSFORM_CONTINUE, t.filter.transform(event)) + + def testTransformPassesV3Event(t): + filterDef = V2FilterDefinition(99, "include", "1.2.3") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._filterspec._v2Filters[3] = filtersByLevel + t.filter._filterspec._filtersDefined = True + + event = { + "snmpVersion": "3", + "oid": filterDef.oid, + } + t.assertEquals(TRANSFORM_CONTINUE, t.filter.transform(event)) + + def testTransformDropsV2Event(t): + filterDef = V2FilterDefinition(99, "exclude", "1.2.3") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._app.counters = { + "eventCount": 0, + "eventFilterDroppedCount": 0, + } + t.filter._filterspec._v2Filters[3] = filtersByLevel + t.filter._filterspec._filtersDefined = True + + event = { + "snmpVersion": "2", + "oid": filterDef.oid, + } + t.assertEquals(TRANSFORM_DROP, t.filter.transform(event)) + + def testTransformDropsV3Event(t): + filterDef = V2FilterDefinition(99, "exclude", "1.2.3") + filtersByLevel = {filterDef.oid: filterDef} + t.filter._app.counters = { + "eventCount": 0, + "eventFilterDroppedCount": 0, + } + t.filter._filterspec._v2Filters[3] = filtersByLevel + t.filter._filterspec._filtersDefined = True + + event = { + "snmpVersion": "3", + "oid": filterDef.oid, + } + t.assertEquals(TRANSFORM_DROP, t.filter.transform(event)) + + def testTransformWithoutFilters(t): + t.filter._filterspec._filtersDefined = False + + event = { + "snmpVersion": "1", + "snmpV1GenericTrapType": 6, + "snmpV1Enterprise": "1.2.3", + "snmpV1SpecificTrap": 59, + } + t.assertEquals(TRANSFORM_CONTINUE, t.filter.transform(event)) + + event = { + "snmpVersion": "2", + "oid": "1.2.3", + } + t.assertEquals(TRANSFORM_CONTINUE, t.filter.transform(event)) + + def testTrapFilterDefaultParse(t): + t.spec.update_from_string(EventManagerBase.trapFilters) + # t.assertEquals(t.filter._eventService.sendEvent.called, False) + t.assertEquals(len(t.filter._filterspec._v1Traps), 6) + t.assertEquals(len(t.filter._filterspec._v1Filters), 1) + t.assertEquals(len(t.filter._filterspec._v2Filters), 1) + + def testTrapFilterParseCollectorMatch(t): + filterCfg = "localhost exclude v2 1.3.6.1.2.1.43.18.2.0.1" + t.spec.update_from_string(filterCfg) + # t.assertEquals(t.filter._eventService.sendEvent.called, False) + t.assertEquals(len(t.filter._filterspec._v2Filters), 1) + + def testTrapFilterParseCollectorNotMatch(t): + filterCfg = "remoteDMZ exclude v2 1.3.6.1.2.1.43.18.2.0.1" + t.spec.update_from_string(filterCfg) + # 
t.assertEquals(t.filter._eventService.sendEvent.called, False)
+        t.assertEquals(len(t.filter._filterspec._v2Filters), 0)
+
+
+# def test_suite():
+#     from unittest import TestSuite, makeSuite
+#
+#     suite = TestSuite()
+#     suite.addTest(makeSuite(TrapFilterTest))
+#     return suite
diff --git a/Products/ZenEvents/zentrap/trapfilter.py b/Products/ZenEvents/zentrap/trapfilter.py
new file mode 100644
index 0000000000..76d8a5266e
--- /dev/null
+++ b/Products/ZenEvents/zentrap/trapfilter.py
@@ -0,0 +1,301 @@
+##############################################################################
+#
+# Copyright (C) Zenoss, Inc. 2024, all rights reserved.
+#
+# This content is made available according to terms specified in
+# License.zenoss under the directory where your Zenoss product is installed.
+#
+##############################################################################
+
+from __future__ import absolute_import, print_function
+
+import logging
+
+from twisted.internet import defer
+from zope.interface import implementer
+
+from Products.ZenHub.interfaces import (
+    ICollectorEventTransformer,
+    TRANSFORM_CONTINUE,
+    TRANSFORM_DROP,
+)
+
+from .filterspec import countOidLevels
+
+log = logging.getLogger("zen.zentrap.trapfilter")
+
+
+@implementer(ICollectorEventTransformer)
+class TrapFilter(object):
+    """
+    Filters SNMP trap events at the collector, dropping events that match
+    the configured trap filter definitions.
+
+    These transformers are run sequentially before a fingerprint is generated
+    for the event, so they can set fields which are used by an
+    ICollectorEventFingerprintGenerator.
+
+    Transformers are executed in ascending order of their `weight`
+    attribute, so this filter (weight 1) runs before heavier transformers.
+    """
+
+    # implements ICollectorEventTransformer
+    weight = 1
+
+    def __init__(self, app, spec):
+        self._app = app
+        self._filterspec = spec
+        self._checksum = None
+        self._filters = (
+            GenericV1Predicate(self._filterspec.v1traps),
+            EnterpriseV1Predicate(self._filterspec.v1filters),
+            SnmpV2Predicate(self._filterspec.v2filters),
+        )
+
+    # implements ICollectorEventTransformer
+    def transform(self, event):
+        """
+        Performs any transforms of the specified event at the collector.
+
+        @param event: The event to transform.
+        @type event: dict
+        @return: Returns TRANSFORM_CONTINUE if this event should be
+            forwarded on to the next transformer in the sequence,
+            TRANSFORM_STOP if no further transformers should be performed on
+            this event, and TRANSFORM_DROP if the event should be dropped.
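+
+        A minimal illustrative call (`trap_filter` and the event shown are
+        examples, not names defined in this module)::
+
+            status = trap_filter.transform(
+                {"snmpVersion": "2", "oid": "1.3.6.1.6.3.1.1.5.3"}
+            )
+            # status is either TRANSFORM_CONTINUE or TRANSFORM_DROP,
+            # depending on the trap filters configured for this collector.
+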
+ @rtype: int + """ + result = TRANSFORM_CONTINUE + snmpVersion = event.get("snmpVersion", None) + if snmpVersion and self._filterspec.defined: + log.debug("filtering V%s event %s", snmpVersion, event) + if self._dropEvent(event): + log.debug("dropping event %s", event) + self._app.counters["eventFilterDroppedCount"] += 1 + result = TRANSFORM_DROP + else: + log.debug( + "filter skipped snmp-version=%s filters-defined=%s event=%s", + snmpVersion, + self._filterspec.defined, + event, + ) + return result + + @defer.inlineCallbacks + def task(self): + log.debug("retrieving trap filters") + try: + service = yield self._app.getRemoteConfigServiceProxy() + checksum, trapfilters = yield service.callRemote( + "getTrapFilters", self._checksum + ) + if checksum is None: + log.debug("no update on trap filters") + defer.returnValue(None) + + trapfilters = trapfilters if trapfilters is not None else "" + events = self._filterspec.update_from_string(trapfilters) + for event in events: + mesg = event.get("message") + if mesg: + log.warn(mesg) + self._app.sendEvent(event) + + state = "updated" if self._checksum is not None else "initial" + log.info("applied %s trap filters", state) + self._checksum = checksum + except Exception: + log.exception("failed to retrieve trap filters") + + def _dropEvent(self, event): + """ + Determine if an event should be dropped. + Assumes there are some filters defined, so the default if no matching + filter is found should be True; i.e. the event did not match any + existing filter that would include it, so therefore we should drop it. + + @param event: The event to drop or keep. + @return: Returns True if the event should be dropped; + False if the event be kept. + @rtype: boolean + """ + trapfilter = next( + (tf for tf in self._filters if tf.is_valid(event)), None + ) + if trapfilter: + log.debug("using trap filter %r", trapfilter) + return trapfilter(event) + log.error("dropping unknown trap event=%r", event) + return True + + +class TrapFilterPredicate(object): + """ + Base class for predicates that determine whether a trap is ignored. + + Predicate implementations will return True indicating that the + event should be ignored. + """ + + def __init__(self, definitions): + self._definitions = definitions + + def is_valid(self, event): + return False + + def __call__(self, event): + return False + + +class GenericV1Predicate(TrapFilterPredicate): + def __init__(self, *args): + super(GenericV1Predicate, self).__init__(*args) + self._genericTraps = frozenset([0, 1, 2, 3, 4, 5]) + + def is_valid(self, event): + if event.get("snmpVersion", None) != "1": + return False + if event.get("snmpV1GenericTrapType", None) not in self._genericTraps: + return False + return True + + def __call__(self, event): + traptype = event.get("snmpV1GenericTrapType", None) + definition = self._definitions.get(traptype, None) + if definition and not definition.exclude: + return False + return True + + +class EnterpriseV1Predicate(TrapFilterPredicate): + def is_valid(self, event): + if event.get("snmpVersion", None) != "1": + return False + if event.get("snmpV1GenericTrapType", None) != 6: + return False + return True + + def __call__(self, event): + oid = event.get("snmpV1Enterprise", None) + if oid is None: + log.error( + "No OID found for enterprise-specific trap for V1 event: %s", + event, + ) + return True + + return _check_definitions( + ( + getter() + for getter in ( + # order is important! 
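+                    # The first getter that returns a definition wins: an
+                    # exact "<oid>-<specific trap>" entry, then the
+                    # "<oid>-*" wildcard, then the closest globbed OID.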
+                    lambda: self._getSpecificTrapDefinition(event, oid),
+                    lambda: self._getWildCardDefinition(oid),
+                    lambda: self._getGlobMatchDefinition(oid),
+                )
+            )
+        )
+
+    def _getSpecificTrapDefinition(self, event, enterpriseOID):
+        specificTrap = event.get("snmpV1SpecificTrap", None)
+        if specificTrap is None:
+            return None
+        key = "".join([enterpriseOID, "-", str(specificTrap)])
+        definition = _findFilterByLevel(key, self._definitions)
+        if definition:
+            log.debug("matched [specific-trap] definition %s", definition)
+        return definition
+
+    def _getWildCardDefinition(self, enterpriseOID):
+        key = "".join([enterpriseOID, "-", "*"])
+        definition = _findFilterByLevel(key, self._definitions)
+        if definition:
+            log.debug("matched [wildcard] definition %s", definition)
+        return definition
+
+    def _getGlobMatchDefinition(self, enterpriseOID):
+        definition = _findClosestGlobbedFilter(
+            enterpriseOID, self._definitions
+        )
+        if definition:
+            log.debug("matched [glob] definition %s", definition)
+        return definition
+
+
+class SnmpV2Predicate(TrapFilterPredicate):
+    def is_valid(self, event):
+        return event.get("snmpVersion", None) in ("2", "3")
+
+    def __call__(self, event):
+        oid = event["oid"]
+        return _check_definitions(
+            (
+                getter()
+                for getter in (
+                    # order is important!
+                    lambda: self._getExactMatchDefinition(oid),
+                    lambda: self._getGlobMatchDefinition(oid),
+                )
+            )
+        )
+
+    def _getExactMatchDefinition(self, oid):
+        # First, try an exact match on the OID
+        definition = _findFilterByLevel(oid, self._definitions)
+        if definition:
+            log.debug("matched [exact] definition %s", definition)
+        return definition
+
+    def _getGlobMatchDefinition(self, oid):
+        definition = _findClosestGlobbedFilter(oid, self._definitions)
+        if definition:
+            log.debug("matched [glob] definition %s", definition)
+        return definition
+
+
+def _check_definitions(definitions):
+    definition = next((defn for defn in definitions if defn is not None), None)
+    if definition is None:
+        log.debug("no matching definitions found")
+        return True
+    return definition.exclude
+
+
+def _findClosestGlobbedFilter(oid, filtersByLevel):
+    filterDefinition = None
+    globbedValue = oid
+    while globbedValue != "*":
+        globbedValue = getNextHigherGlobbedOid(globbedValue)
+        filterDefinition = _findFilterByLevel(globbedValue, filtersByLevel)
+        if filterDefinition:
+            break
+    return filterDefinition
+
+
+def _findFilterByLevel(oid, filtersByLevel):
+    filterDefinition = None
+    oidLevels = countOidLevels(oid)
+    filtersByOid = filtersByLevel.get(oidLevels, None)
+    if filtersByOid is not None and len(filtersByOid) > 0:
+        filterDefinition = filtersByOid.get(oid, None)
+    return filterDefinition
+
+
+def getNextHigherGlobbedOid(oid):
+    """
+    Return the next highest globbed OID in the OID hierarchy.
+    For instance, given an OID of "1.2.3.4" or "1.2.3.4.*", return "1.2.3.*".
+
+    @return: The next highest globbed OID, up to just "*"
+    @rtype: string
+    """
+    dotIndex = oid.rfind(".")
+    if dotIndex != -1 and oid[dotIndex:] == ".*":
+        dotIndex = oid.rfind(".", 0, dotIndex)
+
+    if dotIndex < 1 or dotIndex == len(oid) - 1:
+        nextGlobbedOID = "*"
+    else:
+        nextGlobbedOID = "".join([oid[0:dotIndex], ".*"])
+    return nextGlobbedOID
diff --git a/Products/ZenEvents/zentrap/users.py b/Products/ZenEvents/zentrap/users.py
new file mode 100644
index 0000000000..a78795853e
--- /dev/null
+++ b/Products/ZenEvents/zentrap/users.py
@@ -0,0 +1,44 @@
+##############################################################################
+#
+# Copyright (C) Zenoss, Inc. 2024, all rights reserved.
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from twisted.internet import defer + +log = logging.getLogger("zen.zentrap.users") + + +class CreateAllUsers(object): + """ + Create all users task. + """ + + def __init__(self, app, receiver): + self._app = app + self._receiver = receiver + self._users = [] + + @defer.inlineCallbacks + def task(self): + try: + service = yield self._app.getRemoteConfigServiceProxy() + users = yield service.callRemote("createAllUsers") + diffs = tuple(u for u in users if u not in self._users) + if diffs: + log.debug( + "received %d new/updated user%s", + len(diffs), + "s" if len(diffs) != 1 else "", + ) + self._receiver.create_users(diffs) + self._users = users + except Exception: + log.exception("failed to retrieve SNMP users") diff --git a/Products/ZenHub/HubService.py b/Products/ZenHub/HubService.py index 758c2cf0c8..f54bbfae00 100644 --- a/Products/ZenHub/HubService.py +++ b/Products/ZenHub/HubService.py @@ -16,21 +16,57 @@ from Products.ZenUtils.deprecated import deprecated -class HubService(pb.Referenceable): +class HubService(object, pb.Referenceable): + """ + The base class for a ZenHub service class. + + :attr log: The logger object for this service. + :type log: logging.Logger + :attr fqdn: This attribute is deprecated. + :type fqdn: str + :attr dmd: Root ZODB object + :type dmd: Products.ZenModel.DataRoot.DataRoot + :attr instance: The name of the Collection Hub. + :type instance: str + :attr callTime: The total time, in seconds, this service has spent processing remote requests. + :type callTime: float + + :attr listeners: ZenHub clients for this service + :type listeners: List[twisted.spread.pb.RemoteReference] + :attr listenerOptions: Options associated with the client for this service. + :type listenerOptions: Mapping[twisted.spread.pb.RemoteReference, Mapping[Any, Any]] + """ # noqa E501 + def __init__(self, dmd, instance): - self.log = logging.getLogger("zen.hub") + """ + Initialize a HubService instance. + + :param dmd: The root Zenoss object in ZODB. + :type dmd: Products.ZenModel.DataRoot.DataRoot + :param instance: The name of the collection monitor. + :type instance: str + """ + self.log = logging.getLogger( + "zen.hub.{}".format(type(self).__name__.lower()) + ) self.fqdn = socket.getfqdn() self.dmd = dmd self.zem = dmd.ZenEventManager self.instance = instance - self.listeners = [] + self.callTime = 0.0 + self.listeners = [] # Clients of this service self.listenerOptions = {} - self.callTime = 0 def getPerformanceMonitor(self): + """ + Return the performance monitor (collection hub) instance. + + :return type: Products.ZenModel.interfaces.IMonitor + """ return self.dmd.Monitors.getPerformanceMonitor(self.instance) def remoteMessageReceived(self, broker, message, args, kw): + """Overrides pb.Referenceable method.""" self.log.debug("Servicing %s in %s", message, self.name()) now = time.time() try: @@ -53,6 +89,11 @@ def deleted(self, object): pass def name(self): + """ + Return the name of this ZenHub service class. 
+ + :return type: str + """ return self.__class__.__name__ def addListener(self, remote, options=None): @@ -63,15 +104,18 @@ def addListener(self, remote, options=None): self.listenerOptions[remote] = options def removeListener(self, listener): - self.log.debug( - "removing listener for %s:%s", self.instance, self.name() - ) - try: + if listener in self.listeners: self.listeners.remove(listener) - except ValueError: - self.warning("Unable to remove listener... ignoring") - - self.listenerOptions.pop(listener, None) + self.listenerOptions.pop(listener, None) + self.log.debug( + "removed listener for %s:%s", self.instance, self.name() + ) + else: + self.log.debug( + "listener is not registered for %s:%s", + self.instance, + self.name(), + ) def sendEvents(self, events): map(self.sendEvent, events) diff --git a/Products/ZenHub/PBDaemon.py b/Products/ZenHub/PBDaemon.py index dcb2903342..2adb35114f 100644 --- a/Products/ZenHub/PBDaemon.py +++ b/Products/ZenHub/PBDaemon.py @@ -7,46 +7,27 @@ # ############################################################################## -"""PBDaemon - -Base for daemons that connect to zenhub - -""" - import collections import os import sys -import time -import traceback -from functools import partial -from hashlib import sha1 from itertools import chain from urlparse import urlparse -import twisted.python.log - -from metrology import Metrology -from metrology.instruments import Gauge -from metrology.registry import registry -from twisted.cred import credentials -from twisted.internet import reactor, defer, task -from twisted.internet.error import ( - AlreadyCalled, - ConnectionLost, - ReactorNotRunning, -) -from twisted.python.failure import Failure +import six + +from twisted.cred.credentials import UsernamePassword +from twisted.internet.endpoints import clientFromString, serverFromString +from twisted.internet import defer, reactor, task +from twisted.internet.error import ReactorNotRunning from twisted.spread import pb -from ZODB.POSException import ConflictError -from zope.component import getUtilitiesFor -from zope.interface import implements +from zope.component import provideUtility +from zope.interface import implementer from Products.ZenEvents.ZenEventClasses import ( App_Start, App_Stop, Clear, - Heartbeat, Warning, ) from Products.ZenRRD.Thresholds import Thresholds @@ -59,99 +40,23 @@ MetricWriter, ThresholdNotifier, ) -from Products.ZenUtils.PBUtil import ReconnectingPBClientFactory -from Products.ZenUtils.Utils import zenPath, atomicWrite +from Products.ZenUtils.Utils import importClass, lookupClass from Products.ZenUtils.ZenDaemon import ZenDaemon -from .interfaces import ( - ICollectorEventFingerprintGenerator, - ICollectorEventTransformer, - TRANSFORM_DROP, - TRANSFORM_STOP, -) +from .errors import HubDown, translateError +from .events import EventClient, EventQueueManager +from .interfaces import IEventService +from .localserver import LocalServer, ZenHubStatus from .metricpublisher import publisher +from .pinger import PingZenHub +from .zenhubclient import ZenHubClient + +PB_PORT = 8789 # field size limits for events DEFAULT_LIMIT = 524288 # 512k LIMITS = {"summary": 256, "message": 4096} - -class RemoteException(pb.Error, pb.Copyable, pb.RemoteCopy): - """Exception that can cross the PB barrier""" - - def __init__(self, msg, tb): - super(RemoteException, self).__init__(msg) - self.traceback = tb - - def getStateToCopy(self): - return { - "args": tuple(self.args), - "traceback": self.traceback, - } - - def setCopyableState(self, state): - 
self.args = state["args"] - self.traceback = state["traceback"] - - def __str__(self): - return "%s:%s" % ( - super(RemoteException, self).__str__(), - ("\n" + self.traceback) if self.traceback else " ", - ) - - -pb.setUnjellyableForClass(RemoteException, RemoteException) - - -# ZODB conflicts -class RemoteConflictError(RemoteException): - pass - - -pb.setUnjellyableForClass(RemoteConflictError, RemoteConflictError) - - -# Invalid monitor specified -class RemoteBadMonitor(RemoteException): - pass - - -pb.setUnjellyableForClass(RemoteBadMonitor, RemoteBadMonitor) - - -def translateError(callable): - """ - Decorator function to wrap remote exceptions into something - understandable by our daemon. - - @parameter callable: function to wrap - @type callable: function - @return: function's return or an exception - @rtype: various - """ - - def inner(*args, **kw): - """ - Interior decorator - """ - try: - return callable(*args, **kw) - except ConflictError as ex: - raise RemoteConflictError( - "Remote exception: %s: %s" % (ex.__class__, ex), - traceback.format_exc(), - ) - except Exception as ex: - raise RemoteException( - "Remote exception: %s: %s" % (ex.__class__, ex), - traceback.format_exc(), - ) - - return inner - - -PB_PORT = 8789 - startEvent = { "eventClass": App_Start, "summary": "started", @@ -164,541 +69,142 @@ def inner(*args, **kw): "severity": Warning, } - DEFAULT_HUB_HOST = "localhost" DEFAULT_HUB_PORT = PB_PORT DEFAULT_HUB_USERNAME = "admin" -DEFAULT_HUB_PASSWORD = "zenoss" +DEFAULT_HUB_PASSWORD = "zenoss" # noqa S105 DEFAULT_HUB_MONITOR = "localhost" -class HubDown(Exception): - pass - - class FakeRemote: def callRemote(self, *args, **kwargs): - ex = HubDown("ZenHub is down") - return defer.fail(ex) - - -class DefaultFingerprintGenerator(object): - """ - Generates a fingerprint using a checksum of properties of the event. - """ + return defer.fail(HubDown()) - implements(ICollectorEventFingerprintGenerator) - weight = 100 - - _IGNORE_FIELDS = ("rcvtime", "firstTime", "lastTime") - - def generate(self, event): - fields = [] - for k, v in sorted(event.iteritems()): - if k not in DefaultFingerprintGenerator._IGNORE_FIELDS: - if isinstance(v, unicode): - v = v.encode("utf-8") - else: - v = str(v) - fields.extend((k, v)) - return sha1("|".join(fields)).hexdigest() - - -def _load_utilities(utility_class): - """ - Loads ZCA utilities of the specified class. - - @param utility_class: The type of utility to load. - @return: A list of utilities, sorted by their 'weight' attribute. - """ - utilities = (f for n, f in getUtilitiesFor(utility_class)) - return sorted(utilities, key=lambda f: getattr(f, "weight", 100)) +@implementer(IEventService) +class PBDaemon(ZenDaemon, pb.Referenceable): + """Base class for services that connect to ZenHub.""" + mname = name = "pbdaemon" -class BaseEventQueue(object): - def __init__(self, maxlen): - self.maxlen = maxlen + initialServices = ["EventService"] - def append(self, event): - """ - Appends the event to the queue. + _customexitcode = 0 - @param event: The event. - @return: If the queue is full, this will return the oldest event - which was discarded when this event was added. - """ - raise NotImplementedError() + def __init__( + self, + noopts=0, + keeproot=False, + name=None, + publisher=None, + internal_publisher=None, + ): + # if we were provided our collector name via the constructor + # instead of via code, be sure to store it correctly. 
+ if name is not None: + self.name = self.mname = name - def popleft(self): - """ - Removes and returns the oldest event from the queue. If the queue - is empty, raises IndexError. + provideUtility(self, IEventService) - @return: The oldest event from the queue. - @raise IndexError: If the queue is empty. - """ - raise NotImplementedError() + super(PBDaemon, self).__init__(noopts, keeproot) - def extendleft(self, events): - """ - Appends the events to the beginning of the queue (they will be the - first ones removed with calls to popleft). The list of events are - expected to be in order, with the earliest queued events listed - first. - - @param events: The events to add to the beginning of the queue. - @type events: list - @return A list of discarded events that didn't fit on the queue. - @rtype list - """ - raise NotImplementedError() + # Configure/initialize the ZenHub client + self.__zhclient = _getZenHubClient(self, self.options) + self.__zhclient.notify_on_connect(self._load_initial_services) + self.__zenhub_ready = None - def __len__(self): - """ - Returns the length of the queue. + self.__pinger = PingZenHub(self.__zhclient) - @return: The length of the queue. - """ - raise NotImplementedError() - - def __iter__(self): - """ - Returns an iterator over the elements in the queue (oldest events - are returned first). - """ - raise NotImplementedError() - - -class DequeEventQueue(BaseEventQueue): - """ - Event queue implementation backed by a deque. This queue does not - perform de-duplication of events. - """ - - def __init__(self, maxlen): - super(DequeEventQueue, self).__init__(maxlen) - self.queue = collections.deque() - - def append(self, event): - # Make sure every processed event specifies the time it was queued. - if "rcvtime" not in event: - event["rcvtime"] = time.time() - - discarded = None - if len(self.queue) == self.maxlen: - discarded = self.popleft() - self.queue.append(event) - return discarded - - def popleft(self): - return self.queue.popleft() - - def extendleft(self, events): - if not events: - return events - available = self.maxlen - len(self.queue) - if not available: - return events - to_discard = 0 - if available < len(events): - to_discard = len(events) - available - self.queue.extendleft(reversed(events[to_discard:])) - return events[:to_discard] - - def __len__(self): - return len(self.queue) - - def __iter__(self): - return iter(self.queue) - - -class DeDupingEventQueue(BaseEventQueue): - """ - Event queue implementation backed by a OrderedDict. This queue performs - de-duplication of events (when an event with the same fingerprint is - seen, the 'count' field of the event is incremented by one instead of - sending an additional event). 
- """ - - def __init__(self, maxlen): - super(DeDupingEventQueue, self).__init__(maxlen) - self.default_fingerprinter = DefaultFingerprintGenerator() - self.fingerprinters = _load_utilities( - ICollectorEventFingerprintGenerator + self._thresholds = Thresholds() + self._threshold_notifier = ThresholdNotifier( + self.sendEvent, self._thresholds ) - self.queue = collections.OrderedDict() - - def _event_fingerprint(self, event): - for fingerprinter in self.fingerprinters: - event_fingerprint = fingerprinter.generate(event) - if event_fingerprint is not None: - break - else: - event_fingerprint = self.default_fingerprinter.generate(event) - return event_fingerprint - - def _first_time(self, event1, event2): - def first(evt): - return evt.get("firstTime", evt["rcvtime"]) - - return min(first(event1), first(event2)) - - def append(self, event): - # Make sure every processed event specifies the time it was queued. - if "rcvtime" not in event: - event["rcvtime"] = time.time() + self.rrdStats = DaemonStats() + self.lastStats = 0 + self.counters = collections.Counter() - fingerprint = self._event_fingerprint(event) - if fingerprint in self.queue: - # Remove the currently queued item - we will insert again which - # will move to the end. - current_event = self.queue.pop(fingerprint) - event["count"] = current_event.get("count", 1) + 1 - event["firstTime"] = self._first_time(current_event, event) - self.queue[fingerprint] = event - return + self.startEvent = startEvent.copy() + self.stopEvent = stopEvent.copy() + details = {"component": self.name, "device": self.options.monitor} + for evt in self.startEvent, self.stopEvent: + evt.update(details) - discarded = None - if len(self.queue) == self.maxlen: - discarded = self.popleft() + self._metrologyReporter = None + self.__statistics_task = None - self.queue[fingerprint] = event - return discarded + self.__publisher = publisher + self.__internal_publisher = internal_publisher + self.__metric_writer = None + self.__derivative_tracker = None - def popleft(self): - try: - return self.queue.popitem(last=False)[1] - except KeyError: - # Re-raise KeyError as IndexError for common interface across - # queues. 
- raise IndexError() - - def extendleft(self, events): - # Attempt to de-duplicate with events currently in queue - events_to_add = [] - for event in events: - fingerprint = self._event_fingerprint(event) - if fingerprint in self.queue: - current_event = self.queue[fingerprint] - current_event["count"] = current_event.get("count", 1) + 1 - current_event["firstTime"] = self._first_time( - current_event, event - ) - else: - events_to_add.append(event) - - if not events_to_add: - return events_to_add - available = self.maxlen - len(self.queue) - if not available: - return events_to_add - to_discard = 0 - if available < len(events_to_add): - to_discard = len(events_to_add) - available - old_queue, self.queue = self.queue, collections.OrderedDict() - for event in events_to_add[to_discard:]: - self.queue[self._event_fingerprint(event)] = event - for fingerprint, event in old_queue.iteritems(): - self.queue[fingerprint] = event - return events_to_add[:to_discard] - - def __len__(self): - return len(self.queue) - - def __iter__(self): - return self.queue.itervalues() - - -class EventQueueManager(object): - - CLEAR_FINGERPRINT_FIELDS = ( - "device", - "component", - "eventKey", - "eventClass", - ) - - def __init__(self, options, log): - self.options = options - self.transformers = _load_utilities(ICollectorEventTransformer) - self.log = log - self.discarded_events = 0 - # TODO: Do we want to limit the size of the clear event dictionary? - self.clear_events_count = {} - self._initQueues() - self._eventsSent = Metrology.meter("collectordaemon.eventsSent") - self._discardedEvents = Metrology.meter( - "collectordaemon.discardedEvent" - ) - self._eventTimer = Metrology.timer("collectordaemon.eventTimer") - metricNames = {x[0] for x in registry} - if "collectordaemon.eventQueue" not in metricNames: - queue = self - - class EventQueueGauge(Gauge): - @property - def value(self): - return queue.event_queue_length - - Metrology.gauge("collectordaemon.eventQueue", EventQueueGauge()) - - def _initQueues(self): - maxlen = self.options.maxqueuelen - queue_type = ( - DeDupingEventQueue - if self.options.deduplicate_events - else DequeEventQueue - ) - self.event_queue = queue_type(maxlen) - self.perf_event_queue = queue_type(maxlen) - self.heartbeat_event_queue = collections.deque(maxlen=1) - - def _transformEvent(self, event): - for transformer in self.transformers: - result = transformer.transform(event) - if result == TRANSFORM_DROP: - self.log.debug( - "Event dropped by transform %s: %s", transformer, event - ) - return None - if result == TRANSFORM_STOP: - break - return event - - def _clearFingerprint(self, event): - return tuple( - event.get(field, "") for field in self.CLEAR_FINGERPRINT_FIELDS + self.__eventqueue = None + self.__eventclient = None + self.__recordQueuedEventsCountLoop = task.LoopingCall( + self.__record_queued_events_count ) - def _removeDiscardedEventFromClearState(self, discarded): - # - # There is a particular condition that could cause clear events to - # never be sent until a collector restart. - # Consider the following sequence: - # - # 1) Clear event added to queue. This is the first clear event of - # this type and so it is added to the clear_events_count - # dictionary with a count of 1. - # 2) A large number of additional events are queued until maxqueuelen - # is reached, and so the queue starts to discard events including - # the clear event from #1. 
- # 3) The same clear event in #1 is sent again, however this time it - # is dropped because allowduplicateclears is False and the event - # has a > 0 count. - # - # To resolve this, we are careful to track all discarded events, and - # remove their state from the clear_events_count dictionary. - # - opts = self.options - if not opts.allowduplicateclears and opts.duplicateclearinterval == 0: - severity = discarded.get("severity", -1) - if severity == Clear: - clear_fingerprint = self._clearFingerprint(discarded) - if clear_fingerprint in self.clear_events_count: - self.clear_events_count[clear_fingerprint] -= 1 - - def _addEvent(self, queue, event): - if self._transformEvent(event) is None: - return - - allowduplicateclears = self.options.allowduplicateclears - duplicateclearinterval = self.options.duplicateclearinterval - if not allowduplicateclears or duplicateclearinterval > 0: - clear_fingerprint = self._clearFingerprint(event) - severity = event.get("severity", -1) - if severity != Clear: - # A non-clear event - clear out count if it exists - self.clear_events_count.pop(clear_fingerprint, None) - else: - current_count = self.clear_events_count.get( - clear_fingerprint, 0 - ) - self.clear_events_count[clear_fingerprint] = current_count + 1 - if not allowduplicateclears and current_count != 0: - self.log.debug( - "allowduplicateclears dropping clear event %r", event - ) - return - if ( - duplicateclearinterval > 0 - and current_count % duplicateclearinterval != 0 - ): - self.log.debug( - "duplicateclearinterval dropping clear event %r", event + if self.options.cycle: + self.__server = _getLocalServer(self.options) + self.__server.add_resource( + "zenhub", + ZenHubStatus( + lambda: ( + "connected" + if self.__zenhub_connected + else "disconnected" ) - return + ), + ) + else: + self.__server = None - discarded = queue.append(event) - self.log.debug( - "Queued event (total of %d) %r", len(self.event_queue), event + self.__zenhub_connected = False + self.__zhclient.notify_on_connect( + lambda: self._set_zenhub_connected(True) ) - if discarded: - self.log.warn("Discarded event - queue overflow: %r", discarded) - self._removeDiscardedEventFromClearState(discarded) - self.discarded_events += 1 - self._discardedEvents.mark() - - def addEvent(self, event): - self._addEvent(self.event_queue, event) - - def addPerformanceEvent(self, event): - self._addEvent(self.perf_event_queue, event) - def addHeartbeatEvent(self, heartbeat_event): - self.heartbeat_event_queue.append(heartbeat_event) - - @defer.inlineCallbacks - def sendEvents(self, event_sender_fn): - # Create new queues - we will flush the current queues and don't want - # to get in a loop sending events that are queued while we send this - # batch (the event sending is asynchronous). 
- prev_heartbeat_event_queue = self.heartbeat_event_queue - prev_perf_event_queue = self.perf_event_queue - prev_event_queue = self.event_queue - self._initQueues() - - perf_events = [] - events = [] - sent = 0 - try: - - def chunk_events(): - chunk_remaining = self.options.eventflushchunksize - heartbeat_events = [] - num_heartbeat_events = min( - chunk_remaining, len(prev_heartbeat_event_queue) - ) - for i in xrange(num_heartbeat_events): - heartbeat_events.append( - prev_heartbeat_event_queue.popleft() - ) - chunk_remaining -= num_heartbeat_events - - perf_events = [] - num_perf_events = min( - chunk_remaining, len(prev_perf_event_queue) - ) - for i in xrange(num_perf_events): - perf_events.append(prev_perf_event_queue.popleft()) - chunk_remaining -= num_perf_events - - events = [] - num_events = min(chunk_remaining, len(prev_event_queue)) - for i in xrange(num_events): - events.append(prev_event_queue.popleft()) - return heartbeat_events, perf_events, events - - heartbeat_events, perf_events, events = chunk_events() - while heartbeat_events or perf_events or events: - self.log.debug( - "Sending %d events, %d perf events, %d heartbeats", - len(events), - len(perf_events), - len(heartbeat_events), - ) - start = time.time() - yield event_sender_fn(heartbeat_events + perf_events + events) - duration = int((time.time() - start) * 1000) - self._eventTimer.update(duration) - sent += len(events) + len(perf_events) + len(heartbeat_events) - self._eventsSent.mark(len(events)) - self._eventsSent.mark(len(perf_events)) - self._eventsSent.mark(len(heartbeat_events)) - heartbeat_events, perf_events, events = chunk_events() - - defer.returnValue(sent) - except Exception: - # Restore performance events that failed to send - perf_events.extend(prev_perf_event_queue) - discarded_perf_events = self.perf_event_queue.extendleft( - perf_events + def _set_zenhub_connected(self, state): + self.__zenhub_connected = state + if state: + # Re-add the disconnect callback because the ZenHub client + # removes all disconnect callbacks after a disconnect. + self.__zhclient.notify_on_disconnect( + lambda: self._set_zenhub_connected(False) ) - self.discarded_events += len(discarded_perf_events) - self._discardedEvents.mark(len(discarded_perf_events)) - - # Restore events that failed to send - events.extend(prev_event_queue) - discarded_events = self.event_queue.extendleft(events) - self.discarded_events += len(discarded_events) - self._discardedEvents.mark(len(discarded_events)) - - # Remove any clear state for events that were discarded - for discarded in chain(discarded_perf_events, discarded_events): - self.log.debug( - "Discarded event - queue overflow: %r", discarded - ) - self._removeDiscardedEventFromClearState(discarded) - raise @property - def event_queue_length(self): - return ( - len(self.event_queue) - + len(self.perf_event_queue) - + len(self.heartbeat_event_queue) - ) - - -class PBDaemon(ZenDaemon, pb.Referenceable): + def local_server(self): + return self.__server - name = "pbdaemon" - initialServices = ["EventService"] - heartbeatEvent = {"eventClass": Heartbeat} - heartbeatTimeout = 60 * 3 - _customexitcode = 0 - _pushEventsDeferred = None - _eventHighWaterMark = None - _healthMonitorInterval = 30 - - def __init__(self, noopts=0, keeproot=False, name=None): - # if we were provided our collector name via the constructor instead of - # via code, be sure to store it correctly. 
- if name is not None: - self.name = name - self.mname = name - - try: - ZenDaemon.__init__(self, noopts, keeproot) - - except IOError: - import traceback + @property + def services(self): + return self.__zhclient.services - self.log.critical(traceback.format_exc(0)) - sys.exit(1) + def __record_queued_events_count(self): + if self.rrdStats.name and self.__eventqueue is not None: + self.rrdStats.gauge("eventQueueLength", len(self.__eventqueue)) - self._thresholds = None - self._threshold_notifier = None - self.rrdStats = DaemonStats() - self.lastStats = 0 - self.perspective = None - self.services = {} - self.eventQueueManager = EventQueueManager(self.options, self.log) - self.startEvent = startEvent.copy() - self.stopEvent = stopEvent.copy() - details = dict(component=self.name, device=self.options.monitor) - for evt in self.startEvent, self.stopEvent, self.heartbeatEvent: - evt.update(details) - self.initialConnect = defer.Deferred() - self.stopped = False - self.counters = collections.Counter() - self._pingedZenhub = None - self._connectionTimeout = None - self._publisher = None - self._internal_publisher = None - self._metric_writer = None - self._derivative_tracker = None - self._metrologyReporter = None - # Add a shutdown trigger to send a stop event and flush the event queue - reactor.addSystemEventTrigger("before", "shutdown", self._stopPbDaemon) + def generateEvent(self, event, **kw): + """ + Return a 'filled out' version of the given event. + """ + eventCopy = {} + for k, v in chain(event.items(), kw.items()): + if isinstance(v, six.string_types): + # default max size is 512k + size = LIMITS.get(k, DEFAULT_LIMIT) + eventCopy[k] = v[0:size] if len(v) > size else v + else: + eventCopy[k] = v - # Set up a looping call to support the health check. 
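# Illustrative sketch, not part of the patch: the twisted.internet.task
# primitive used by the health monitor removed below and by the new
# __recordQueuedEventsCountLoop and statistics tasks added above; the
# callable and interval are arbitrary examples.
from twisted.internet import task

def _poll_health():
    pass  # periodic work goes here

_loop = task.LoopingCall(_poll_health)
_d = _loop.start(30, now=False)  # run every 30s; _d fires when _loop.stop() is called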
- self.healthMonitor = task.LoopingCall(self._checkZenHub) - self.healthMonitor.start(self._healthMonitorInterval) + eventCopy["agent"] = self.name + eventCopy["monitor"] = self.options.monitor + eventCopy["manager"] = self.fqdn + return eventCopy def publisher(self): - if not self._publisher: + if not self.__publisher: host, port = urlparse(self.options.redisUrl).netloc.split(":") try: port = int(port) @@ -710,273 +216,141 @@ def publisher(self): publisher.defaultRedisPort, ) port = publisher.defaultRedisPort - self._publisher = publisher.RedisListPublisher( + self.__publisher = publisher.RedisListPublisher( host, port, self.options.metricBufferSize, channel=self.options.metricsChannel, maxOutstandingMetrics=self.options.maxOutstandingMetrics, ) - return self._publisher + return self.__publisher + + def setInternalPublisher(self, publisher): + self.__internal_publisher = publisher def internalPublisher(self): - if not self._internal_publisher: + if not self.__internal_publisher: url = os.environ.get("CONTROLPLANE_CONSUMER_URL", None) username = os.environ.get("CONTROLPLANE_CONSUMER_USERNAME", "") password = os.environ.get("CONTROLPLANE_CONSUMER_PASSWORD", "") if url: - self._internal_publisher = publisher.HttpPostPublisher( + self.__internal_publisher = publisher.HttpPostPublisher( username, password, url ) - return self._internal_publisher + return self.__internal_publisher def metricWriter(self): - if not self._metric_writer: + if not self.__metric_writer: publisher = self.publisher() metric_writer = MetricWriter(publisher) if os.environ.get("CONTROLPLANE", "0") == "1": internal_publisher = self.internalPublisher() if internal_publisher: - internal_metric_filter = ( - lambda metric, value, timestamp, tags: tags - and tags.get("internal", False) - ) + + def _check_internal(metric, value, timestamp, tags): + return tags and tags.get("internal", False) + internal_metric_writer = FilteredMetricWriter( - internal_publisher, internal_metric_filter + internal_publisher, _check_internal ) - self._metric_writer = AggregateMetricWriter( + self.__metric_writer = AggregateMetricWriter( [metric_writer, internal_metric_writer] ) else: - self._metric_writer = metric_writer - return self._metric_writer + self.__metric_writer = metric_writer + return self.__metric_writer def derivativeTracker(self): - if not self._derivative_tracker: - self._derivative_tracker = DerivativeTracker() - return self._derivative_tracker - - def connecting(self): - """ - Called when about to connect to zenhub - """ - self.log.info("Attempting to connect to zenhub") - - def getZenhubInstanceId(self): - """ - Called after we connected to zenhub. - """ - - def callback(result): - self.log.info("Connected to the zenhub/%s instance", result) + if not self.__derivative_tracker: + self.__derivative_tracker = DerivativeTracker() + return self.__derivative_tracker - def errback(result): - self.log.info( - "Unexpected error appeared while getting zenhub " - "instance number %s", - result, - ) - - d = self.perspective.callRemote("getHubInstanceId") - d.addCallback(callback) - d.addErrback(errback) - return d - - def gotPerspective(self, perspective): - """ - This gets called every time we reconnect. - - @parameter perspective: Twisted perspective object - @type perspective: Twisted perspective object - """ - self.perspective = perspective - self.getZenhubInstanceId() - # Cancel the connection timeout timer as it's no longer needed. 
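# Illustrative sketch, not part of the patch: the DelayedCall pattern the
# removed code below relies on - a timeout scheduled with reactor.callLater()
# is cancelled once it is no longer needed, tolerating the case where it has
# already fired. Names and the delay are arbitrary examples.
from twisted.internet import reactor
from twisted.internet.error import AlreadyCalled

_timeout = reactor.callLater(30, lambda: None)
try:
    _timeout.cancel()
except AlreadyCalled:
    pass  # the timeout already ran; nothing left to cancel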
- if self._connectionTimeout: - try: - self._connectionTimeout.cancel() - except AlreadyCalled: - pass - self._connectionTimeout = None - d2 = self.getInitialServices() - if self.initialConnect: - self.log.debug("Chaining getInitialServices with d2") - self.initialConnect, d = None, self.initialConnect - d2.chainDeferred(d) + def eventService(self): + return self.getServiceNow("EventService") - def connect(self): - pingInterval = self.options.zhPingInterval - factory = ReconnectingPBClientFactory( - connectTimeout=60, - pingPerspective=self.options.pingPerspective, - pingInterval=pingInterval, - pingtimeout=pingInterval * 5, - ) - self.log.info( - "Connecting to %s:%d", self.options.hubhost, self.options.hubport - ) - factory.connectTCP(self.options.hubhost, self.options.hubport) - username = self.options.hubusername - password = self.options.hubpassword - self.log.debug("Logging in as %s", username) - c = credentials.UsernamePassword(username, password) - factory.gotPerspective = self.gotPerspective - factory.connecting = self.connecting - factory.setCredentials(c) - - def timeout(d): - if not d.called: - self.connectTimeout() - - self._connectionTimeout = reactor.callLater( - self.options.hubtimeout, timeout, self.initialConnect - ) - return self.initialConnect + def sendEvents(self, events): + if self.__eventclient is None: + return + return self.__eventclient.sendEvents(events) - def connectTimeout(self): - self.log.error("Timeout connecting to zenhub: is it running?") - pass + def sendHeartbeat(self, event): + if self.__eventclient is None: + return + self.__eventclient.sendHeartbeat(event) - def eventService(self): - return self.getServiceNow("EventService") + @defer.inlineCallbacks + def sendEvent(self, event, **kw): + if self.__eventclient is None: + return + yield self.__eventclient.sendEvent(event, **kw) def getServiceNow(self, svcName): - if svcName not in self.services: + svc = self.__zhclient.services.get(svcName) + if svc is None: self.log.warning( - "No service named %r: ZenHub may be disconnected", svcName + "no service named %r: ZenHub may be disconnected", svcName ) - return self.services.get(svcName, None) or FakeRemote() + return svc or FakeRemote() - def getService(self, serviceName, serviceListeningInterface=None): + @defer.inlineCallbacks + def getService(self, name, serviceListeningInterface=None): """ - Attempt to get a service from zenhub. Returns a deferred. - When service is retrieved it is stashed in self.services with - serviceName as the key. When getService is called it will first - check self.services and if serviceName is already there it will return - the entry from self.services wrapped in a defer.succeed + Attempt to get a service from ZenHub. 
+ + @rtype: Deferred """ - if serviceName in self.services: - return defer.succeed(self.services[serviceName]) - - def removeService(ignored): - self.log.debug("Removing service %s", serviceName) - if serviceName in self.services: - del self.services[serviceName] - - def callback(result, serviceName): - self.log.debug("Loaded service %s from zenhub", serviceName) - self.services[serviceName] = result - result.notifyOnDisconnect(removeService) - return result - - def errback(error, serviceName): - self.log.debug("errback after getting service %s", serviceName) - self.log.error("Could not retrieve service %s", serviceName) - if serviceName in self.services: - del self.services[serviceName] - return error - - d = self.perspective.callRemote( - "getService", - serviceName, + svc = yield self.__zhclient.get_service( + name, self.options.monitor, serviceListeningInterface or self, self.options.__dict__, ) - d.addCallback(callback, serviceName) - d.addErrback(errback, serviceName) - return d - - def getInitialServices(self): - """ - After connecting to zenhub, gather our initial list of services. - """ + defer.returnValue(svc) - def errback(error): - if isinstance(error, Failure): - self.log.critical( - "Invalid monitor: %s: %s", self.options.monitor, error - ) - reactor.stop() - return defer.fail( - RemoteBadMonitor( - "Invalid monitor: %s" % self.options.monitor, "" - ) - ) - return error - - self.log.debug( - "Setting up initial services: %s", ", ".join(self.initialServices) - ) - d = defer.DeferredList( - [self.getService(name) for name in self.initialServices], - fireOnOneErrback=True, - consumeErrors=True, - ) - d.addErrback(errback) - return d + def connect(self): + self.__zenhub_ready = self.__zhclient.start() + self.__pinger.start() + return self.__zenhub_ready def connected(self): - pass + """ + Invoked after a ZenHub connection is established and the + initial set of services have been loaded. - def _getThresholdNotifier(self): - if not self._threshold_notifier: - self._threshold_notifier = ThresholdNotifier( - self.sendEvent, self.getThresholds() - ) - return self._threshold_notifier + Sub-classes should override this method to add their own + functionality. + + @rtype: Deferred + """ def getThresholds(self): - if not self._thresholds: - self._thresholds = Thresholds() return self._thresholds def run(self): - def stopReporter(): - if self._metrologyReporter: - return self._metrologyReporter.stop() - - # Order of the shutdown triggers matter. Want to stop reporter first, - # calling self.metricWriter() below registers shutdown triggers for - # the actual metric http and redis publishers. 
- reactor.addSystemEventTrigger("before", "shutdown", stopReporter) + # Start the connection to zenhub + self.connect() - threshold_notifier = self._getThresholdNotifier() self.rrdStats.config( self.name, self.options.monitor, self.metricWriter(), - threshold_notifier, + self._threshold_notifier, self.derivativeTracker(), ) - self.log.debug("Starting PBDaemon initialization") - d = self.connect() - - def callback(result): - self.sendEvent(self.startEvent) - self.pushEventsLoop() - self.log.debug("Calling connected.") - self.connected() - return result - - def startStatsLoop(): - self.log.debug("Starting Statistic posting") - loop = task.LoopingCall(self.postStatistics) - loop.start(self.options.writeStatistics, now=False) - daemonTags = { - "zenoss_daemon": self.name, - "zenoss_monitor": self.options.monitor, - "internal": True, - } - self._metrologyReporter = TwistedMetricReporter( - self.options.writeStatistics, self.metricWriter(), daemonTags - ) - self._metrologyReporter.start() if self.options.cycle: - reactor.callWhenRunning(startStatsLoop) - d.addCallback(callback) - d.addErrback(twisted.python.log.err) + self.__server.start() + reactor.addSystemEventTrigger( + "before", "shutdown", self.__server.stop + ) + + reactor.addSystemEventTrigger( + "after", + "shutdown", + lambda: self.log.info("%s shutting down", self.name), + ) + + reactor.callWhenRunning(self._started) reactor.run() if self._customexitcode: sys.exit(self._customexitcode) @@ -989,205 +363,146 @@ def stop(self, ignored=""): try: reactor.stop() except ReactorNotRunning: - self.log.debug("Tried to stop reactor that was stopped") + self.log.debug("tried to stop reactor that was stopped") else: self.log.debug("stop() called when not running") - def _stopPbDaemon(self): - if self.stopped: - return - self.stopped = True - if "EventService" in self.services: - # send stop event if we don't have an implied --cycle, - # or if --cycle has been specified - if not hasattr(self.options, "cycle") or getattr( - self.options, "cycle", True - ): - self.sendEvent(self.stopEvent) - self.log.debug("Sent a 'stop' event") - if self._pushEventsDeferred: - self.log.debug("Currently sending events. Queueing next call") - d = self._pushEventsDeferred - # Schedule another call to flush any additional queued events - d.addBoth(lambda unused: self.pushEvents()) + _started_failures = { + "connect": "failed to connect to ZenHub", + "services": "failed to retrieve a service from ZenHub", + "eventclient": "failed to configure and start the event client", + "stats": "failed to configure and start statistics recording", + } + + @defer.inlineCallbacks + def _load_initial_services(self): + msg = self._started_failures["services"] + try: + for svcname in self.initialServices: + try: + yield self.getService(svcname) + except Exception: + if self.options.cycle: + self.log.exception(msg) + else: + raise + else: + self.log.info("retrieved ZenHub service name=%s", svcname) + self.log.info("finished retrieving initial services") + except Exception as ex: + if self.options.cycle: + self.log.exception(msg) else: - d = self.pushEvents() - return d + detail = ("%s %s" % (type(ex).__name__, ex)).strip() + self.log.critical("%s: %s", msg, detail) + self.stop() - self.log.debug("No event sent as no EventService available.") + @defer.inlineCallbacks + def _started(self): + # Called when the Twisted reactor is running. 
+ try: + # Wait for the connection to zenhub + state = "connect" + self.log.info("waiting for zenhub") + ready, self.__zenhub_ready = self.__zenhub_ready, None + yield ready - def sendEvents(self, events): - map(self.sendEvent, events) + state = "eventclient" + self._setup_event_client() - def sendEvent(self, event, **kw): - """Add event to queue of events to be sent. If we have an event - service then process the queue. - """ - generatedEvent = self.generateEvent(event, **kw) - self.eventQueueManager.addEvent(generatedEvent) - self.counters["eventCount"] += 1 - - if self._eventHighWaterMark: - return self._eventHighWaterMark - elif ( - self.eventQueueManager.event_queue_length - >= self.options.maxqueuelen * self.options.queueHighWaterMark - ): - return self.pushEvents() - else: - return defer.succeed(None) + if self.options.cycle: + state = "stats" + self._start_statistics_task() - def generateEvent(self, event, **kw): - """Add event to queue of events to be sent. If we have an event - service then process the queue. - """ - if not reactor.running: - return - eventCopy = {} - for k, v in chain(event.items(), kw.items()): - if isinstance(v, basestring): - # default max size is 512k - size = LIMITS.get(k, DEFAULT_LIMIT) - eventCopy[k] = v[0:size] if len(v) > size else v - else: - eventCopy[k] = v + state = "metrics" + self._start_internal_metrics_task() - eventCopy["agent"] = self.name - eventCopy["monitor"] = self.options.monitor - eventCopy["manager"] = self.fqdn - return eventCopy + reactor.addSystemEventTrigger("before", "shutdown", self._stop) - @defer.inlineCallbacks - def pushEventsLoop(self): - """Periodially, wake up and flush events to ZenHub.""" - reactor.callLater(self.options.eventflushseconds, self.pushEventsLoop) - yield self.pushEvents() - - # Record the number of events in the queue up to every 2 seconds. - now = time.time() - if self.rrdStats.name and now >= (self.lastStats + 2): - self.lastStats = now - self.rrdStats.gauge( - "eventQueueLength", self.eventQueueManager.event_queue_length - ) + # Schedule the `connected` method to run + reactor.callLater(0, self.connected) + except Exception as ex: + msg = self._started_failures[state] + if self.options.cycle: + self.log.exception(msg) + else: + detail = ("%s %s" % (type(ex).__name__, ex)).strip() + self.log.critical("%s: %s", msg, detail) + self.stop() @defer.inlineCallbacks - def pushEvents(self): - """Flush events to ZenHub.""" - # are we already shutting down? - if not reactor.running: - self.log.debug("Skipping event sending - reactor not running.") - return - - if ( - self.eventQueueManager.event_queue_length - >= self.options.maxqueuelen * self.options.queueHighWaterMark - and not self._eventHighWaterMark - ): - self.log.debug( - "Queue length exceeded high water mark, %s ;" - "creating high water mark deferred", - self.eventQueueManager.event_queue_length, - ) - self._eventHighWaterMark = defer.Deferred() - - # are still connected to ZenHub? 
- evtSvc = self.services.get("EventService", None) - if not evtSvc: - self.log.error("No event service: %r", evtSvc) - yield task.deferLater(reactor, 0, lambda: None) - if self._eventHighWaterMark: - d, self._eventHighWaterMark = self._eventHighWaterMark, None - # not connected, release throttle and let things queue - d.callback("No Event Service") - defer.returnValue(None) - - if self._pushEventsDeferred: - self.log.debug("Skipping event sending - previous call active.") - defer.returnValue("Push Pending") - - sent = 0 - try: - # only set _pushEventsDeferred after we know we have - # an evtSvc/connectivity - self._pushEventsDeferred = defer.Deferred() - - def repush(val): - if ( - self.eventQueueManager.event_queue_length - >= self.options.eventflushchunksize - ): - self.pushEvents() - return val - - # conditionally push more events after this pushEvents - # call finishes - self._pushEventsDeferred.addCallback(repush) - - discarded_events = self.eventQueueManager.discarded_events - if discarded_events: - self.log.error( - "Discarded oldest %d events because maxqueuelen was " - "exceeded: %d/%d", - discarded_events, - discarded_events + self.options.maxqueuelen, - self.options.maxqueuelen, - ) - self.counters["discardedEvents"] += discarded_events - self.eventQueueManager.discarded_events = 0 + def _stop(self): + if self.__eventclient is not None: + self.__eventclient.sendEvent(self.stopEvent) + yield self.__eventclient.stop() + self.log.debug("stopped event client") + yield self.__zhclient.stop() + + def _setup_event_client(self): + self.__eventqueue = EventQueueManager(self.options, self.log) + self.__eventclient = EventClient( + self.options, + self.__eventqueue, + self.generateEvent, + lambda: self.getService("EventService"), + ) + self.__eventclient.start() + self.__eventclient.sendEvent(self.startEvent) + self.__recordQueuedEventsCountLoop.start(2.0, now=False) + self.log.info("started event client") + + def _start_internal_metrics_task(self): + self._metrologyReporter = TwistedMetricReporter( + self.options.writeStatistics, + self.metricWriter(), + { + "zenoss_daemon": self.name, + "zenoss_monitor": self.options.monitor, + "internal": True, + }, + ) + self._metrologyReporter.start() + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_internal_metrics_task + ) + self.log.info("started internal metrics task") - send_events_fn = partial(evtSvc.callRemote, "sendEvents") - try: - sent = yield self.eventQueueManager.sendEvents(send_events_fn) - except ConnectionLost as ex: - self.log.error("Error sending event: %s", ex) - # let the reactor have time to clean up any connection - # errors and make callbacks - yield task.deferLater(reactor, 0, lambda: None) - except Exception as ex: - self.log.exception(ex) - # let the reactor have time to clean up any connection - # errors and make callbacks - yield task.deferLater(reactor, 0, lambda: None) - finally: - if self._pushEventsDeferred: - d, self._pushEventsDeferred = self._pushEventsDeferred, None - d.callback("sent %s" % sent) - if ( - self._eventHighWaterMark - and self.eventQueueManager.event_queue_length - < self.options.maxqueuelen * self.options.queueHighWaterMark - ): - self.log.debug( - "Queue restored to below high water mark: %s", - self.eventQueueManager.event_queue_length, - ) - d, self._eventHighWaterMark = self._eventHighWaterMark, None - d.callback("Queue length below high water mark") + def _stop_internal_metrics_task(self): + if self._metrologyReporter: + self._metrologyReporter.stop() + self._metrologyReporter 
= None + self.log.info("stopped internal metrics task") - def heartbeat(self): - """if cycling, send a heartbeat, else, shutdown""" - if not self.options.cycle: - self.stop() - return - heartbeatEvent = self.generateEvent( - self.heartbeatEvent, timeout=self.heartbeatTimeout + def _start_statistics_task(self): + self.__statistics_task = task.LoopingCall(self.postStatistics) + self.__statistics_task.start(self.options.writeStatistics, now=False) + reactor.addSystemEventTrigger( + "before", "shutdown", self._stop_statistics_task ) - self.eventQueueManager.addHeartbeatEvent(heartbeatEvent) - # heartbeat is normally 3x cycle time - self.niceDoggie(self.heartbeatTimeout / 3) + self.log.info("started statistics reporting task") + + def _stop_statistics_task(self): + if self.__statistics_task: + self.__statistics_task.stop() + self.__statistics_task = None + self.log.info("stopped statistics reporting task") def postStatisticsImpl(self): pass def postStatistics(self): # save daemon counter stats - for name, value in self.counters.items(): - self.log.info("Counter %s, value %d", name, value) + for name, value in chain( + self.counters.items(), self.__eventclient.counters.items() + ): + self.log.debug("counter %s, value %d", name, value) self.rrdStats.counter(name, value) # persist counters values - self.postStatisticsImpl() + try: + self.postStatisticsImpl() + except Exception: + self.log.exception("sub-class postStatisticsImpl method failed") def _pickleName(self): instance_id = os.environ.get("CONTROLPLANE_INSTANCE_ID") @@ -1205,140 +520,74 @@ def remote_setPropertyItems(self, items): @translateError def remote_updateThresholdClasses(self, classes): - from Products.ZenUtils.Utils import importClass + self.loadThresholdClasses(classes) - self.log.debug("Loading classes %s", classes) - for c in classes: + def loadThresholdClasses(self, classnames): + for name in classnames: try: - importClass(c) + cls = lookupClass(name) + if cls: + self.log.debug( + "already imported threshold class class=%s", name + ) + continue + importClass(name) + self.log.info("imported threshold class class=%s", name) except ImportError: - self.log.error("Unable to import class %s", c) - - def _checkZenHub(self): - """ - Check status of ZenHub (using ping method of service). - @return: if ping occurs, return deferred with result of ping attempt. - """ - self.log.debug("_checkZenHub: entry") - - def callback(result): - self.log.debug("ZenHub health check: Got result %s", result) - if result == "pong": - self.log.debug( - "ZenHub health check: " - "Success - received pong from ZenHub ping service." - ) - self._signalZenHubAnswering(True) - else: - self.log.error( - "ZenHub health check did not respond as expected." - ) - self._signalZenHubAnswering(False) - - def errback(error): - self.log.error( - "Error pinging ZenHub: %s (%s).", - error, - getattr(error, "message", ""), - ) - self._signalZenHubAnswering(False) - - try: - if self.perspective: - self.log.debug( - "ZenHub health check: " - "perspective found. attempting remote ping call." - ) - d = self.perspective.callRemote("ping") - d.addCallback(callback) - d.addErrback(errback) - return d - else: - self.log.debug("ZenHub health check: ZenHub may be down.") - self._signalZenHubAnswering(False) - except pb.DeadReferenceError: - self.log.warning( - "ZenHub health check: " - "DeadReferenceError - lost connection to ZenHub." 
- ) - self._signalZenHubAnswering(False) - except Exception as e: - self.log.error( - "ZenHub health check: caught %s exception: %s", - e.__class__, - e.message, - ) - self._signalZenHubAnswering(False) - - def _signalZenHubAnswering(self, answering): - """ - Write or remove file that the ZenHub_answering health check uses - to report status. - - @param answering: true if ZenHub is answering, False, otherwise. - """ - self.log.debug("_signalZenHubAnswering(%s)", answering) - filename = "zenhub_connected" - signalFilePath = zenPath("var", filename) - if answering: - self.log.debug("writing file at %s", signalFilePath) - atomicWrite(signalFilePath, "") - else: - try: - self.log.debug("removing file at %s", signalFilePath) - os.remove(signalFilePath) - except Exception as e: - self.log.debug( - "ignoring %s exception (%s) removing file %s", - e.__class__, - e.message, - signalFilePath, - ) + self.log.exception("unable to import threshold class %s", name) + except AttributeError: + self.log.exception("unable to import threshold class %s", name) def buildOptions(self): - ZenDaemon.buildOptions(self) - + super(PBDaemon, self).buildOptions() + LocalServer.buildOptions(self.parser) self.parser.add_option( "--hubhost", dest="hubhost", default=DEFAULT_HUB_HOST, - help="Host of zenhub daemon." " Default is %s." % DEFAULT_HUB_HOST, + help="Host of zenhub daemon; default %default", ) self.parser.add_option( "--hubport", dest="hubport", type="int", default=DEFAULT_HUB_PORT, - help="Port zenhub listens on." "Default is %s." % DEFAULT_HUB_PORT, + help="Port zenhub listens on; default %default", ) self.parser.add_option( "--hubusername", dest="hubusername", default=DEFAULT_HUB_USERNAME, - help="Username for zenhub login." - " Default is %s." % DEFAULT_HUB_USERNAME, + help="Username for zenhub login; default %default", ) self.parser.add_option( "--hubpassword", dest="hubpassword", default=DEFAULT_HUB_PASSWORD, - help="Password for zenhub login." - " Default is %s." % DEFAULT_HUB_PASSWORD, + help="Password for zenhub login; default %default", ) self.parser.add_option( "--monitor", dest="monitor", default=DEFAULT_HUB_MONITOR, help="Name of monitor instance to use for" - " configuration. Default is %s." % DEFAULT_HUB_MONITOR, + " configuration; default %default", ) self.parser.add_option( "--initialHubTimeout", dest="hubtimeout", type="int", default=30, - help="Initial time to wait for a ZenHub " "connection", + help="Initial time to wait for a ZenHub connection", ) + self.parser.add_option( + "--zenhubpinginterval", + dest="zhPingInterval", + default=120, + type="int", + help="How often to ping zenhub", + ) + self.parser.add_option( "--allowduplicateclears", dest="allowduplicateclears", @@ -1347,33 +596,27 @@ def buildOptions(self): help="Send clear events even when the most " "recent event was also a clear event.", ) - self.parser.add_option( "--duplicateclearinterval", dest="duplicateclearinterval", default=0, type="int", - help=( - "Send a clear event every [DUPLICATECLEARINTEVAL] " "events." 
- ), + help="Send a clear event every DUPLICATECLEARINTEVAL events.", ) - self.parser.add_option( "--eventflushseconds", dest="eventflushseconds", default=5.0, type="float", - help="Seconds between attempts to flush " "events to ZenHub.", + help="Seconds between attempts to flush events to ZenHub.", ) - self.parser.add_option( "--eventflushchunksize", dest="eventflushchunksize", default=50, type="int", - help="Number of events to send to ZenHub" "at one time", + help="Number of events to send to ZenHub at one time", ) - self.parser.add_option( "--maxqueuelen", dest="maxqueuelen", @@ -1381,7 +624,6 @@ def buildOptions(self): type="int", help="Maximum number of events to queue", ) - self.parser.add_option( "--queuehighwatermark", dest="queueHighWaterMark", @@ -1390,14 +632,6 @@ def buildOptions(self): help="The size, in percent, of the event queue " "when event pushback starts", ) - self.parser.add_option( - "--zenhubpinginterval", - dest="zhPingInterval", - default=120, - type="int", - help="How often to ping zenhub", - ) - self.parser.add_option( "--disable-event-deduplication", dest="deduplicate_events", @@ -1414,9 +648,8 @@ def buildOptions(self): default=publisher.defaultRedisPort ), help="redis connection string: " - "redis://[hostname]:[port]/[db], default: %default", + "redis://[hostname]:[port]/[db]; default: %default", ) - self.parser.add_option( "--metricBufferSize", dest="metricBufferSize", @@ -1438,13 +671,6 @@ def buildOptions(self): default=publisher.defaultMaxOutstandingMetrics, help="Max Number of metrics to allow in redis", ) - self.parser.add_option( - "--disable-ping-perspective", - dest="pingPerspective", - help="Enable or disable ping perspective", - default=True, - action="store_false", - ) self.parser.add_option( "--writeStatistics", dest="writeStatistics", @@ -1452,3 +678,36 @@ def buildOptions(self): default=30, help="How often to write internal statistics value in seconds", ) + + self.parser.add_option( + "--disable-ping-perspective", + dest="pingPerspective", + default=True, + action="store_false", + help="Enable or disable ping perspective", + ) + + +def _getZenHubClient(app, options): + creds = UsernamePassword(options.hubusername, options.hubpassword) + endpointDescriptor = "tcp:{host}:{port}".format( + host=options.hubhost, port=options.hubport + ) + endpoint = clientFromString(reactor, endpointDescriptor) + return ZenHubClient( + app, + endpoint, + creds, + options.hubtimeout, + reactor, + ) + + +def _getLocalServer(options): + # bind the server to the localhost interface so only local + # connections can be established. 
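# Illustrative sketch, not part of the patch: how the string-based endpoint
# descriptors used by _getZenHubClient() and _getLocalServer() are parsed by
# Twisted; the host and port values are arbitrary examples.
from twisted.internet import reactor
from twisted.internet.endpoints import clientFromString, serverFromString

# "tcp:<host>:<port>" describes an outbound TCP connection.
client_endpoint = clientFromString(reactor, "tcp:localhost:8789")
# "tcp:<port>:interface=127.0.0.1" listens only on the loopback interface.
server_endpoint = serverFromString(reactor, "tcp:9999:interface=127.0.0.1")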
+ server_endpoint_descriptor = "tcp:{port}:interface=127.0.0.1".format( + port=options.localport + ) + server_endpoint = serverFromString(reactor, server_endpoint_descriptor) + return LocalServer(reactor, server_endpoint) diff --git a/Products/ZenHub/configure.zcml b/Products/ZenHub/configure.zcml index 93e6a90254..a94114dd57 100644 --- a/Products/ZenHub/configure.zcml +++ b/Products/ZenHub/configure.zcml @@ -1,3 +1,4 @@ + - - + diff --git a/Products/ZenHub/errors.py b/Products/ZenHub/errors.py new file mode 100644 index 0000000000..5cf91e345e --- /dev/null +++ b/Products/ZenHub/errors.py @@ -0,0 +1,85 @@ +import traceback + +from twisted.spread import pb +from ZODB.POSException import ConflictError + + +class RemoteException(pb.Error, pb.Copyable, pb.RemoteCopy): + """Exception that can cross the PB barrier""" + + def __init__(self, msg, tb): + super(RemoteException, self).__init__(msg) + self.traceback = tb + + def getStateToCopy(self): + return { + "args": tuple(self.args), + "traceback": self.traceback, + } + + def setCopyableState(self, state): + self.args = state["args"] + self.traceback = state["traceback"] + + def __str__(self): + return "%s:%s" % ( + super(RemoteException, self).__str__(), + ("\n" + self.traceback) if self.traceback else " ", + ) + + +pb.setUnjellyableForClass(RemoteException, RemoteException) + + +# ZODB conflicts +class RemoteConflictError(RemoteException): + pass + + +pb.setUnjellyableForClass(RemoteConflictError, RemoteConflictError) + + +# Invalid monitor specified +class RemoteBadMonitor(RemoteException): + pass + + +pb.setUnjellyableForClass(RemoteBadMonitor, RemoteBadMonitor) + + +class HubDown(Exception): + """Raised when a connection to ZenHub is required but not available.""" + + def __init__(self, mesg="ZenHub is down"): + super(HubDown, self).__init__(mesg) + + +def translateError(callable): + """ + Decorator function to wrap remote exceptions into something + understandable by our daemon. + + @parameter callable: function to wrap + @type callable: function + @return: function's return or an exception + @rtype: various + """ + + def inner(*args, **kw): + """ + Interior decorator + """ + try: + return callable(*args, **kw) + except ConflictError as ex: + raise RemoteConflictError( + "Remote exception: %s: %s" % (ex.__class__, ex), + traceback.format_exc(), + ) + except Exception as ex: + raise RemoteException( + "Remote exception: %s: %s" % (ex.__class__, ex), + traceback.format_exc(), + ) + + return inner diff --git a/Products/ZenHub/events/__init__.py b/Products/ZenHub/events/__init__.py new file mode 100644 index 0000000000..bc956ee39b --- /dev/null +++ b/Products/ZenHub/events/__init__.py @@ -0,0 +1,13 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from .client import EventClient +from .queue.manager import EventQueueManager + +__all__ = ("EventClient", "EventQueueManager") diff --git a/Products/ZenHub/events/client.py b/Products/ZenHub/events/client.py new file mode 100644 index 0000000000..4ca1a08d9d --- /dev/null +++ b/Products/ZenHub/events/client.py @@ -0,0 +1,152 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. 
+# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import collections +import logging + +from functools import partial + +from twisted.internet import defer, reactor, task + +from ..errors import HubDown + +log = logging.getLogger("zen.eventclient") + +# field size limits for events +DEFAULT_LIMIT = 524288 # 512k +LIMITS = {"summary": 256, "message": 4096} + + +class EventClient(object): + """ + Manages sending events to ZenHub's event service. + """ + + def __init__(self, options, queue, builder, servicefactory): + """ + Initialize an EventClient instance. + """ + self.__queue = queue + self.__builder = builder + self.__factory = servicefactory + + self.__flushinterval = options.eventflushseconds + self.__flushchunksize = options.eventflushchunksize + self.__maxqueuelength = options.maxqueuelen + self.__limit = options.maxqueuelen * options.queueHighWaterMark + + self.__task = task.LoopingCall(self._push) + self.__taskd = None + self.__pause = None + self.__pushing = False + self.__stopping = False + + self.counters = collections.Counter() + + def start(self): # type: () -> None + """Start the event client.""" + # Note: the __taskd deferred is called when __task is stopped + self.__taskd = self.__task.start(self.__flushinterval, now=False) + self.__taskd.addCallback(self._last_push) + + def stop(self): # type: () -> defer.Deferred + """Stop the event client.""" + self.__stopping = True + if self.__pause is None: + self.__pause = defer.Deferred() + self.__task.stop() + return self.__pause + + def sendEvents(self, events): # (Sequence[dict]) -> defer.DeferredList + return defer.DeferredList([self.sendEvent(event) for event in events]) + + @defer.inlineCallbacks + def sendEvent(self, event, **kw): + """ + Add event to queue of events to be sent. + If we have an event service then process the queue. + """ + if not reactor.running: + defer.returnValue(None) + + # If __pause is not None, yield it which blocks this + # method until the deferred is called and the yield returns. + if self.__pause: + yield self.__pause + + built_event = self.__builder(event, **kw) + self.__queue.addEvent(built_event) + self.counters["eventCount"] += 1 + + def sendHeartbeat(self, event): + self.__queue.addHeartbeatEvent(event) + + @defer.inlineCallbacks + def _last_push(self, task): + yield self._push() + + @defer.inlineCallbacks + def _push(self): + """ + Flush events to ZenHub. 
+ """ + if len(self.__queue) >= self.__limit and not self.__pause: + log.debug( + "pause accepting new events; queue length at or " + "exceeds high water mark (%s >= %s)", + len(self.__queue), + self.__limit, + ) + self.__pause = defer.Deferred() + + if self.__pushing: + log.debug("skipping event sending - previous call active.") + defer.returnValue("push pending") + + try: + self.__pushing = True + + discarded_events = self.__queue.discarded_events + if discarded_events: + log.error( + "discarded oldest %d events because maxqueuelen was " + "exceeded: %d/%d", + discarded_events, + discarded_events + self.__maxqueuelength, + self.__maxqueuelength, + ) + self.counters["discardedEvents"] += discarded_events + self.__queue.discarded_events = 0 + + eventsvc = yield self.__factory() + send_events_fn = partial(eventsvc.callRemote, "sendEvents") + count = yield self.__queue.sendEvents(send_events_fn) + if count > 0: + log.debug("sent %d event%s", count, "s" if count > 1 else "") + except HubDown as ex: + log.warn("event service unavailable: %s", ex) + except Exception as ex: + log.exception("failed to send event: %s", ex) + # let the reactor have time to clean up any connection + # errors and make callbacks + yield task.deferLater(reactor, 0, lambda: None) + finally: + self.__pushing = False + if self.__pause and len(self.__queue) < self.__limit: + # Don't log the 'resume' message during a shutdown is + # confusing to avoid confusion. + if not self.__stopping: + log.debug( + "resume accepting new events; queue length below " + "high water mark (%s < %s)", + len(self.__queue), + self.__limit, + ) + pause, self.__pause = self.__pause, None + pause.callback("Queue length below high water mark") diff --git a/Products/ZenHub/events/queue/__init__.py b/Products/ZenHub/events/queue/__init__.py new file mode 100644 index 0000000000..8a09ea402e --- /dev/null +++ b/Products/ZenHub/events/queue/__init__.py @@ -0,0 +1,4 @@ + +from .manager import EventQueueManager + +__all__ = ("EventQueueManager",) diff --git a/Products/ZenHub/events/queue/base.py b/Products/ZenHub/events/queue/base.py new file mode 100644 index 0000000000..071104d822 --- /dev/null +++ b/Products/ZenHub/events/queue/base.py @@ -0,0 +1,62 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + + +class BaseEventQueue(object): + def __init__(self, maxlen): + self.maxlen = maxlen + + def append(self, event): + """ + Appends the event to the queue. + + @param event: The event. + @return: If the queue is full, this will return the oldest event + which was discarded when this event was added. + """ + raise NotImplementedError() + + def popleft(self): + """ + Removes and returns the oldest event from the queue. If the queue + is empty, raises IndexError. + + @return: The oldest event from the queue. + @raise IndexError: If the queue is empty. + """ + raise NotImplementedError() + + def extendleft(self, events): + """ + Appends the events to the beginning of the queue (they will be the + first ones removed with calls to popleft). The list of events are + expected to be in order, with the earliest queued events listed + first. + + @param events: The events to add to the beginning of the queue. 
+ @type events: list + @return A list of discarded events that didn't fit on the queue. + @rtype list + """ + raise NotImplementedError() + + def __len__(self): + """ + Returns the length of the queue. + + @return: The length of the queue. + """ + raise NotImplementedError() + + def __iter__(self): + """ + Returns an iterator over the elements in the queue (oldest events + are returned first). + """ + raise NotImplementedError() diff --git a/Products/ZenHub/events/queue/deduping.py b/Products/ZenHub/events/queue/deduping.py new file mode 100644 index 0000000000..52fd3abc46 --- /dev/null +++ b/Products/ZenHub/events/queue/deduping.py @@ -0,0 +1,117 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import time + +from collections import OrderedDict + +from Products.ZenHub.interfaces import ICollectorEventFingerprintGenerator + +from .base import BaseEventQueue +from .fingerprint import DefaultFingerprintGenerator +from .misc import load_utilities + + +class DeDupingEventQueue(BaseEventQueue): + """ + Event queue implementation backed by a OrderedDict. This queue performs + de-duplication of events (when an event with the same fingerprint is + seen, the 'count' field of the event is incremented by one instead of + sending an additional event). + """ + + def __init__(self, maxlen): + super(DeDupingEventQueue, self).__init__(maxlen) + self.__fingerprinters = load_utilities( + ICollectorEventFingerprintGenerator + ) + if not self.__fingerprinters: + self.__fingerprinters = [DefaultFingerprintGenerator()] + self.__queue = OrderedDict() + + def append(self, event): + # Make sure every processed event specifies the time it was queued. + if "rcvtime" not in event: + event["rcvtime"] = time.time() + + fingerprint = self._fingerprint_event(event) + if fingerprint in self.__queue: + # Remove the currently queued item - we will insert again which + # will move to the end. + current_event = self.__queue.pop(fingerprint) + event["count"] = current_event.get("count", 1) + 1 + event["firstTime"] = self._first_time(current_event, event) + self.__queue[fingerprint] = event + return + + discarded = None + if len(self.__queue) == self.maxlen: + discarded = self.popleft() + + self.__queue[fingerprint] = event + return discarded + + def popleft(self): + try: + return self.__queue.popitem(last=False)[1] + except KeyError: + # Re-raise KeyError as IndexError for common interface across + # queues. 
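# Illustrative sketch, not part of the patch, of why the translation below
# exists: an empty OrderedDict raises KeyError from popitem(), while the
# deque-backed queue raises IndexError from popleft(), so callers of
# BaseEventQueue.popleft() can rely on a single exception type.
from collections import OrderedDict, deque

try:
    deque().popleft()
except IndexError:
    pass  # what queue-agnostic callers expect
try:
    OrderedDict().popitem(last=False)
except KeyError:
    pass  # hence the KeyError -> IndexError re-raise below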
+ raise IndexError() + + def extendleft(self, events): + # Attempt to de-duplicate with events currently in queue + events_to_add = [] + for event in events: + fingerprint = self._fingerprint_event(event) + if fingerprint in self.__queue: + current_event = self.__queue[fingerprint] + current_event["count"] = current_event.get("count", 1) + 1 + current_event["firstTime"] = self._first_time( + current_event, event + ) + else: + events_to_add.append(event) + + if not events_to_add: + return events_to_add + available = self.maxlen - len(self.__queue) + if not available: + return events_to_add + to_discard = 0 + if available < len(events_to_add): + to_discard = len(events_to_add) - available + old_queue, self.__queue = self.__queue, OrderedDict() + for event in events_to_add[to_discard:]: + self.__queue[self._fingerprint_event(event)] = event + for fingerprint, event in old_queue.iteritems(): + self.__queue[fingerprint] = event + return events_to_add[:to_discard] + + def __contains__(self, event): + return self._fingerprint_event(event) in self.__queue + + def __len__(self): + return len(self.__queue) + + def __iter__(self): + return self.__queue.itervalues() + + def _fingerprint_event(self, event): + for fingerprinter in self.__fingerprinters: + fingerprint = fingerprinter.generate(event) + if fingerprint is not None: + break + return fingerprint + + def _first_time(self, event1, event2): + def first(evt): + return evt.get("firstTime", evt["rcvtime"]) + + return min(first(event1), first(event2)) diff --git a/Products/ZenHub/events/queue/deque.py b/Products/ZenHub/events/queue/deque.py new file mode 100644 index 0000000000..512e42bb40 --- /dev/null +++ b/Products/ZenHub/events/queue/deque.py @@ -0,0 +1,60 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import time + +from collections import deque + +from .base import BaseEventQueue + + +class DequeEventQueue(BaseEventQueue): + """ + Event queue implementation backed by a deque. This queue does not + perform de-duplication of events. + """ + + def __init__(self, maxlen): + super(DequeEventQueue, self).__init__(maxlen) + self.__queue = deque() + + def append(self, event): + # Make sure every processed event specifies the time it was queued. 
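# Illustrative usage sketch, not part of the patch, of the append() overflow
# behaviour defined here, using the module path added by this change; the
# event contents are made up.
from Products.ZenHub.events.queue.deque import DequeEventQueue

q = DequeEventQueue(maxlen=2)
q.append({"summary": "first"})    # returns None; "rcvtime" is stamped
q.append({"summary": "second"})   # returns None
dropped = q.append({"summary": "third"})
# dropped is the oldest event ("first"); EventQueueManager._addEvent()
# counts such returns as discarded events.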
+ if "rcvtime" not in event: + event["rcvtime"] = time.time() + + discarded = None + if len(self.__queue) == self.maxlen: + discarded = self.popleft() + self.__queue.append(event) + return discarded + + def popleft(self): + return self.__queue.popleft() + + def extendleft(self, events): + if not events: + return events + available = self.maxlen - len(self.__queue) + if not available: + return events + to_discard = 0 + if available < len(events): + to_discard = len(events) - available + self.__queue.extendleft(reversed(events[to_discard:])) + return events[:to_discard] + + def __contains__(self, event): + return event in self.__queue + + def __len__(self): + return len(self.__queue) + + def __iter__(self): + return iter(self.__queue) diff --git a/Products/ZenHub/events/queue/fingerprint.py b/Products/ZenHub/events/queue/fingerprint.py new file mode 100644 index 0000000000..b1000b79ac --- /dev/null +++ b/Products/ZenHub/events/queue/fingerprint.py @@ -0,0 +1,25 @@ +from hashlib import sha1 + +from zope.interface import implementer + +from Products.ZenHub.interfaces import ICollectorEventFingerprintGenerator + + +@implementer(ICollectorEventFingerprintGenerator) +class DefaultFingerprintGenerator(object): + """Generates a fingerprint using a checksum of properties of the event.""" + + weight = 100 + + _IGNORE_FIELDS = ("rcvtime", "firstTime", "lastTime") + + def generate(self, event): + fields = [] + for k, v in sorted(event.iteritems()): + if k not in DefaultFingerprintGenerator._IGNORE_FIELDS: + if isinstance(v, unicode): + v = v.encode("utf-8") + else: + v = str(v) + fields.extend((k, v)) + return sha1("|".join(fields)).hexdigest() diff --git a/Products/ZenHub/events/queue/manager.py b/Products/ZenHub/events/queue/manager.py new file mode 100644 index 0000000000..43054c324f --- /dev/null +++ b/Products/ZenHub/events/queue/manager.py @@ -0,0 +1,257 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import time + +from collections import deque +from itertools import chain + +import six + +from metrology import Metrology +from metrology.instruments import Gauge +from metrology.registry import registry +from twisted.internet import defer + +from Products.ZenEvents.ZenEventClasses import Clear +from Products.ZenHub.interfaces import ( + ICollectorEventTransformer, + TRANSFORM_DROP, + TRANSFORM_STOP, +) + +from .misc import load_utilities +from .deduping import DeDupingEventQueue +from .deque import DequeEventQueue + + +class EventQueueManager(object): + + CLEAR_FINGERPRINT_FIELDS = ( + "device", + "component", + "eventKey", + "eventClass", + ) + + def __init__(self, options, log): + self.options = options + self.transformers = load_utilities(ICollectorEventTransformer) + self.log = log + self.discarded_events = 0 + # TODO: Do we want to limit the size of the clear event dictionary? 
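# Illustrative worked example, not part of the patch, of the keys stored in
# clear_events_count: a tuple built by _clearFingerprint() below from
# CLEAR_FINGERPRINT_FIELDS. The event values are made up.
event = {
    "device": "deviceA",
    "component": "eth0",
    "eventClass": "/Status/Ping",
    "severity": 0,  # Clear
}
fingerprint = tuple(
    event.get(f, "") for f in ("device", "component", "eventKey", "eventClass")
)
# fingerprint == ("deviceA", "eth0", "", "/Status/Ping"); the dictionary maps
# it to the number of clear events seen, so duplicate clears can be
# suppressed unless --allowduplicateclears or --duplicateclearinterval
# applies.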
+ self.clear_events_count = {} + self._initQueues() + self._eventsSent = Metrology.meter("collectordaemon.eventsSent") + self._discardedEvents = Metrology.meter( + "collectordaemon.discardedEvent" + ) + self._eventTimer = Metrology.timer("collectordaemon.eventTimer") + metricNames = {x[0] for x in registry} + if "collectordaemon.eventQueue" not in metricNames: + queue = self + + class EventQueueGauge(Gauge): + @property + def value(self): + return len(queue) + + Metrology.gauge("collectordaemon.eventQueue", EventQueueGauge()) + + def __len__(self): + return ( + len(self.event_queue) + + len(self.perf_event_queue) + + len(self.heartbeat_event_queue) + ) + + def _initQueues(self): + maxlen = self.options.maxqueuelen + queue_type = ( + DeDupingEventQueue + if self.options.deduplicate_events + else DequeEventQueue + ) + self.event_queue = queue_type(maxlen) + self.perf_event_queue = queue_type(maxlen) + self.heartbeat_event_queue = deque(maxlen=1) + + def _transformEvent(self, event): + for transformer in self.transformers: + result = transformer.transform(event) + if result == TRANSFORM_DROP: + self.log.debug( + "event dropped by transform %s: %s", transformer, event + ) + return None + if result == TRANSFORM_STOP: + break + return event + + def _clearFingerprint(self, event): + return tuple( + event.get(field, "") for field in self.CLEAR_FINGERPRINT_FIELDS + ) + + def _removeDiscardedEventFromClearState(self, discarded): + # + # There is a particular condition that could cause clear events to + # never be sent until a collector restart. + # Consider the following sequence: + # + # 1) Clear event added to queue. This is the first clear event of + # this type and so it is added to the clear_events_count + # dictionary with a count of 1. + # 2) A large number of additional events are queued until maxqueuelen + # is reached, and so the queue starts to discard events including + # the clear event from #1. + # 3) The same clear event in #1 is sent again, however this time it + # is dropped because allowduplicateclears is False and the event + # has a > 0 count. + # + # To resolve this, we are careful to track all discarded events, and + # remove their state from the clear_events_count dictionary. 
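# --- Editor's illustrative walk-through (not part of this patch) -----------
# The scenario described in the comment above, compressed into code with
# made-up values. Without the decrement performed by
# _removeDiscardedEventFromClearState, step 3 would silently drop the clear
# event until a collector restart.
clear_events_count = {}
fp = ("dev1", "comp1", "key1", "/Status/Snmp")  # hypothetical fingerprint

clear_events_count[fp] = 1   # 1) clear event queued and counted
clear_events_count[fp] -= 1  # 2) queue overflows and the clear is discarded,
                             #    so its count is rolled back (the fix)
assert clear_events_count[fp] == 0
# 3) the same clear arrives again; its count is 0, so it is queued normally
# ---------------------------------------------------------------------------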
+ # + opts = self.options + if not opts.allowduplicateclears and opts.duplicateclearinterval == 0: + severity = discarded.get("severity", -1) + if severity == Clear: + clear_fingerprint = self._clearFingerprint(discarded) + if clear_fingerprint in self.clear_events_count: + self.clear_events_count[clear_fingerprint] -= 1 + + def _addEvent(self, queue, event): + if self._transformEvent(event) is None: + return + + allowduplicateclears = self.options.allowduplicateclears + duplicateclearinterval = self.options.duplicateclearinterval + if not allowduplicateclears or duplicateclearinterval > 0: + clear_fingerprint = self._clearFingerprint(event) + severity = event.get("severity", -1) + if severity != Clear: + # A non-clear event - clear out count if it exists + self.clear_events_count.pop(clear_fingerprint, None) + else: + current_count = self.clear_events_count.get( + clear_fingerprint, 0 + ) + self.clear_events_count[clear_fingerprint] = current_count + 1 + if not allowduplicateclears and current_count != 0: + self.log.debug( + "allowduplicateclears dropping clear event %r", event + ) + return + if ( + duplicateclearinterval > 0 + and current_count % duplicateclearinterval != 0 + ): + self.log.debug( + "duplicateclearinterval dropping clear event %r", event + ) + return + + discarded = queue.append(event) + self.log.debug("queued event (total of %d) %r", len(queue), event) + if discarded: + self.log.warn("discarded event - queue overflow: %r", discarded) + self._removeDiscardedEventFromClearState(discarded) + self.discarded_events += 1 + self._discardedEvents.mark() + + def addEvent(self, event): + self._addEvent(self.event_queue, event) + + def addPerformanceEvent(self, event): + self._addEvent(self.perf_event_queue, event) + + def addHeartbeatEvent(self, heartbeat_event): + self.heartbeat_event_queue.append(heartbeat_event) + + @defer.inlineCallbacks + def sendEvents(self, event_sender_fn): + # Create new queues - we will flush the current queues and don't want + # to get in a loop sending events that are queued while we send this + # batch (the event sending is asynchronous). 
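# --- Editor's illustrative sketch (not part of this patch) -----------------
# How the duplicateclearinterval check in _addEvent above thins repeated
# clears (assuming allowduplicateclears is enabled): with an interval of 3,
# only the 1st, 4th, 7th, ... identical clear events are queued.
interval = 3
queued, count = [], 0
for occurrence in range(1, 8):   # seven identical clear events arrive
    prior, count = count, count + 1
    if prior % interval == 0:    # same modulo test as _addEvent
        queued.append(occurrence)
assert queued == [1, 4, 7]
# ---------------------------------------------------------------------------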
+ prev_heartbeat_event_queue = self.heartbeat_event_queue + prev_perf_event_queue = self.perf_event_queue + prev_event_queue = self.event_queue + self._initQueues() + + perf_events = [] + events = [] + sent = 0 + try: + def chunk_events(): + chunk_remaining = self.options.eventflushchunksize + heartbeat_events = [] + num_heartbeat_events = min( + chunk_remaining, len(prev_heartbeat_event_queue) + ) + for _ in six.moves.range(num_heartbeat_events): + heartbeat_events.append( + prev_heartbeat_event_queue.popleft() + ) + chunk_remaining -= num_heartbeat_events + + perf_events = [] + num_perf_events = min( + chunk_remaining, len(prev_perf_event_queue) + ) + for _ in six.moves.range(num_perf_events): + perf_events.append(prev_perf_event_queue.popleft()) + chunk_remaining -= num_perf_events + + events = [] + num_events = min(chunk_remaining, len(prev_event_queue)) + for _ in six.moves.range(num_events): + events.append(prev_event_queue.popleft()) + return heartbeat_events, perf_events, events + + heartbeat_events, perf_events, events = chunk_events() + while heartbeat_events or perf_events or events: + self.log.debug( + "sending %d events, %d perf events, %d heartbeats", + len(events), + len(perf_events), + len(heartbeat_events), + ) + start = time.time() + yield event_sender_fn(heartbeat_events + perf_events + events) + duration = int((time.time() - start) * 1000) + self._eventTimer.update(duration) + sent += len(events) + len(perf_events) + len(heartbeat_events) + self._eventsSent.mark(len(events)) + self._eventsSent.mark(len(perf_events)) + self._eventsSent.mark(len(heartbeat_events)) + heartbeat_events, perf_events, events = chunk_events() + + defer.returnValue(sent) + except Exception: + # Restore performance events that failed to send + perf_events.extend(prev_perf_event_queue) + discarded_perf_events = self.perf_event_queue.extendleft( + perf_events + ) + self.discarded_events += len(discarded_perf_events) + self._discardedEvents.mark(len(discarded_perf_events)) + + # Restore events that failed to send + events.extend(prev_event_queue) + discarded_events = self.event_queue.extendleft(events) + self.discarded_events += len(discarded_events) + self._discardedEvents.mark(len(discarded_events)) + + # Remove any clear state for events that were discarded + for discarded in chain(discarded_perf_events, discarded_events): + self.log.debug( + "discarded event - queue overflow: %r", discarded + ) + self._removeDiscardedEventFromClearState(discarded) + raise diff --git a/Products/ZenHub/events/queue/misc.py b/Products/ZenHub/events/queue/misc.py new file mode 100644 index 0000000000..21de0f07df --- /dev/null +++ b/Products/ZenHub/events/queue/misc.py @@ -0,0 +1,21 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from zope.component import getUtilitiesFor + + +def load_utilities(utility_class): + """ + Loads ZCA utilities of the specified class. + + @param utility_class: The type of utility to load. + @return: A list of utilities, sorted by their 'weight' attribute. 
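# --- Editor's illustrative sketch (not part of this patch) -----------------
# load_utilities() below returns the registered utilities ordered by
# ascending "weight", defaulting to 100 when the attribute is missing, so
# lower-weight utilities come first. The classes here are invented purely to
# show the ordering rule.
class FirstTransformer(object):
    weight = 50

class DefaultWeightTransformer(object):
    pass  # no weight attribute -> treated as 100

utils = [DefaultWeightTransformer(), FirstTransformer()]
ordered = sorted(utils, key=lambda u: getattr(u, "weight", 100))
assert [type(u).__name__ for u in ordered] == [
    "FirstTransformer", "DefaultWeightTransformer"
]
# ---------------------------------------------------------------------------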
+ """ + utilities = (f for n, f in getUtilitiesFor(utility_class)) + return sorted(utilities, key=lambda f: getattr(f, "weight", 100)) diff --git a/Products/ZenHub/events/queue/tests/__init__.py b/Products/ZenHub/events/queue/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenHub/events/queue/tests/test_base.py b/Products/ZenHub/events/queue/tests/test_base.py new file mode 100644 index 0000000000..9039d1eb79 --- /dev/null +++ b/Products/ZenHub/events/queue/tests/test_base.py @@ -0,0 +1,33 @@ +from unittest import TestCase + +from ..base import BaseEventQueue + + +class BaseEventQueueTest(TestCase): + def setUp(t): + t.beq = BaseEventQueue(maxlen=5) + + def test_init(t): + base_event_queue = BaseEventQueue(maxlen=5) + t.assertEqual(base_event_queue.maxlen, 5) + + def test_append(t): + with t.assertRaises(NotImplementedError): + t.beq.append("event") + + def test_popleft(t): + with t.assertRaises(NotImplementedError): + t.beq.popleft() + + def test_extendleft(t): + with t.assertRaises(NotImplementedError): + t.beq.extendleft(["event_a", "event_b"]) + + def test___len__(t): + with t.assertRaises(NotImplementedError): + len(t.beq) + + def test___iter__(t): + with t.assertRaises(NotImplementedError): + for _ in t.beq: + pass diff --git a/Products/ZenHub/events/queue/tests/test_deduping.py b/Products/ZenHub/events/queue/tests/test_deduping.py new file mode 100644 index 0000000000..98430a8612 --- /dev/null +++ b/Products/ZenHub/events/queue/tests/test_deduping.py @@ -0,0 +1,234 @@ +from mock import Mock, patch +from unittest import TestCase + +from ..deduping import DeDupingEventQueue +from ..fingerprint import DefaultFingerprintGenerator + +PATH = {"src": "Products.ZenHub.events.queue.deduping"} + + +class DeDupingEventQueueTest(TestCase): + def setUp(t): + t.ddeq = DeDupingEventQueue(maxlen=10) + t.event_a, t.event_b = {"name": "event_a"}, {"name": "event_b"} + + @patch("{src}.load_utilities".format(**PATH)) + def test_init(t, load_utilities): + load_utilities.return_value = [] + ddeq = DeDupingEventQueue(maxlen=10) + t.assertEqual(ddeq.maxlen, 10) + + default = DefaultFingerprintGenerator() + expected = default.generate(t.event_a) + actual = ddeq._fingerprint_event(t.event_a) + + t.assertEqual(actual, expected) + + def test_fingerprint_event(t): + t.ddeq.fingerprinters = [] + + ret = t.ddeq._fingerprint_event(t.event_a) + expected = DefaultFingerprintGenerator().generate(t.event_a) + t.assertEqual(ret, expected) + + # Identical events generate the same fingerprint + event_2 = t.event_a.copy() + ret = t.ddeq._fingerprint_event(event_2) + t.assertEqual(ret, expected) + + @patch("{src}.load_utilities".format(**PATH)) + def test_fingerprint_event_fingerprinters_list(t, load_utilities): + """_fingerprint_event will attempt to generate a fingerprint from + each ICollectorEventFingerprintGenerator it loaded, + and return the first non-falsey value from them + """ + fp1 = Mock(spec_set=["generate"]) + fp1.generate.return_value = None + fp2 = Mock(spec_set=["generate"]) + fp2.generate.side_effect = lambda x: str(x) + # fp2 returns a value, so fp3 is never called + fp3 = Mock(spec_set=["generate"]) + fp3.generate.side_effect = lambda x: 1 / 0 + load_utilities.return_value = [fp1, fp2, fp3] + ddeq = DeDupingEventQueue(maxlen=10) + + ret = ddeq._fingerprint_event(t.event_a) + + fp1.generate.assert_called_with(t.event_a) + fp2.generate.assert_called_with(t.event_a) + fp3.generate.assert_not_called() + t.assertEqual(ret, str(t.event_a)) + + def test_first_time(t): 
+ """given 2 events, retrun the earliest timestamp of the two + use 'firstTime' if available, else 'rcvtime' + """ + event1 = {"firstTime": 1, "rcvtime": 0} + event2 = {"rcvtime": 2} + + ret = t.ddeq._first_time(event1, event2) + t.assertEqual(ret, 1) + + event1 = {"firstTime": 3, "rcvtime": 1} + event2 = {"rcvtime": 2} + + ret = t.ddeq._first_time(event1, event2) + t.assertEqual(ret, 2) + + @patch("{src}.time".format(**PATH)) + def test_append_timestamp(t, time): + """Make sure every processed event specifies the time it was queued.""" + t.ddeq.append(t.event_a) + event = t.ddeq.popleft() + + t.assertEqual(event["rcvtime"], time.time.return_value) + + @patch("{src}.time".format(**PATH)) + def test_append_deduplication(t, time): + """The same event cannot be added to the queue twice + appending a duplicate event replaces the original + """ + event1 = {"data": "some data"} + event2 = {"data": "some data"} + t.assertEqual(event1, event2) + + t.ddeq.append(event1) + t.ddeq.append(event2) + + t.assertEqual(len(t.ddeq), 1) + + ret = t.ddeq.popleft() + # The new event replaces the old one + t.assertIs(ret, event2) + t.assertEqual(event2["count"], 2) + + @patch("{src}.time".format(**PATH)) + def test_append_deduplicates_and_counts_events(t, time): + time.time.side_effect = (t for t in range(100)) + t.ddeq.append({"name": "event_a"}) + t.assertEqual(list(t.ddeq), [{"rcvtime": 0, "name": "event_a"}]) + t.ddeq.append({"name": "event_a"}) + t.assertEqual( + list(t.ddeq), + [{"rcvtime": 1, "firstTime": 0, "count": 2, "name": "event_a"}], + ) + t.ddeq.append({"name": "event_a"}) + t.assertEqual( + list(t.ddeq), + [{"rcvtime": 2, "firstTime": 0, "count": 3, "name": "event_a"}], + ) + t.ddeq.append({"name": "event_a"}) + t.assertEqual( + list(t.ddeq), + [{"rcvtime": 3, "firstTime": 0, "count": 4, "name": "event_a"}], + ) + + def test_append_pops_and_returns_leftmost_if_full(t): + t.ddeq.maxlen = 1 + + t.ddeq.append(t.event_a) + ret = t.ddeq.append(t.event_b) + + # NOTE: events are stored in a dict, key=fingerprint + t.assertIn(t.event_b, t.ddeq) + t.assertNotIn(t.event_a, t.ddeq) + t.assertEqual(ret, t.event_a) + + def test_popleft(t): + t.ddeq.append(t.event_a) + t.ddeq.append(t.event_b) + + ret = t.ddeq.popleft() + + t.assertEqual(ret, t.event_a) + + def test_popleft_raises_IndexError(t): + """Raises IndexError instead of KeyError, for api compatability""" + with t.assertRaises(IndexError): + t.ddeq.popleft() + + @patch("{src}.time".format(**PATH)) + def test_extendleft(t, time): + """WARNING: extendleft does NOT add timestamps, as .append does + is this behavior is intentional? 
+ """ + event_c = {"name": "event_c"} + t.ddeq.append(event_c) + t.assertEqual(list(t.ddeq), [event_c]) + events = [t.event_a, t.event_b] + + ret = t.ddeq.extendleft(events) + + t.assertEqual(ret, []) + t.assertEqual(list(t.ddeq), [t.event_a, t.event_b, event_c]) + """ + # to validate all events get timestamps + t.assertEqual( + list(t.ddeq), + [{'name': 'event_a', 'rcvtime': time.time.return_value}, + {'name': 'event_b', 'rcvtime': time.time.return_value}, + {'name': 'event_c', 'rcvtime': time.time.return_value}, + ] + ) + """ + + @patch("{src}.time".format(**PATH)) + def test_extendleft_counts_events_BUG(t, time): + time.time.side_effect = (t for t in range(100)) + t.ddeq.extendleft([{"name": "event_a"}, {"name": "event_b"}]) + t.assertEqual( + list(t.ddeq), + # This should work + # [{'rcvtime': 0, 'name': 'event_a'}] + # current behavior + [{"name": "event_a"}, {"name": "event_b"}], + ) + # rcvtime is required, but is not set by extendleft + with t.assertRaises(KeyError): + t.ddeq.extendleft([{"name": "event_a"}, {"name": "event_b"}]) + """ + Test Breaks Here due to missing rcvtime + t.assertEqual( + list(t.ddeq), + [{'rcvtime': 1, 'firstTime': 0, 'count': 2, 'name': 'event_a'}, + {'rcvtime': 1, 'firstTime': 0, 'count': 2, 'name': 'event_b'}] + ) + t.ddeq.extendleft([{'name': 'event_a'}, {'name': 'event_b'}]) + t.assertEqual( + list(t.ddeq), + [{'rcvtime': 2, 'firstTime': 0, 'count': 3, 'name': 'event_a'}, + {'rcvtime': 2, 'firstTime': 0, 'count': 3, 'name': 'event_b'}] + ) + t.ddeq.extendleft([{'name': 'event_a'}, {'name': 'event_b'}]) + t.assertEqual( + list(t.ddeq), + [{'rcvtime': 3, 'firstTime': 0, 'count': 4, 'name': 'event_a'}, + {'rcvtime': 3, 'firstTime': 0, 'count': 4, 'name': 'event_b'}] + ) + """ + + def test_extendleft_returns_events_if_empty(t): + ret = t.ddeq.extendleft([]) + t.assertEqual(ret, []) + + def test_extendleft_returns_extra_events_if_nearly_full(t): + t.ddeq.maxlen = 3 + t.ddeq.extendleft([t.event_a, t.event_b]) + event_c, event_d = {"name": "event_c"}, {"name": "event_d"} + events = [event_c, event_d] + + ret = t.ddeq.extendleft(events) + + t.assertEqual(list(t.ddeq), [event_d, t.event_a, t.event_b]) + t.assertEqual(ret, [event_c]) + + def test___len__(t): + ret = len(t.ddeq) + t.assertEqual(ret, 0) + t.ddeq.extendleft([t.event_a, t.event_b]) + t.assertEqual(len(t.ddeq), 2) + + def test___iter__(t): + t.ddeq.extendleft([t.event_a, t.event_b]) + ret = list(t.ddeq) + t.assertEqual(ret, [t.event_a, t.event_b]) diff --git a/Products/ZenHub/events/queue/tests/test_deque.py b/Products/ZenHub/events/queue/tests/test_deque.py new file mode 100644 index 0000000000..593a94ac2f --- /dev/null +++ b/Products/ZenHub/events/queue/tests/test_deque.py @@ -0,0 +1,102 @@ +from mock import patch +from unittest import TestCase + +from ..deque import DequeEventQueue + +PATH = {"src": "Products.ZenHub.events.queue.deque"} + + +class DequeEventQueueTest(TestCase): + def setUp(t): + t.deq = DequeEventQueue(maxlen=10) + t.event_a, t.event_b = {"name": "event_a"}, {"name": "event_b"} + + def test_init(t): + maxlen = 100 + deq = DequeEventQueue(maxlen=maxlen) + t.assertEqual(deq.maxlen, maxlen) + + @patch("{src}.time".format(**PATH)) + def test_append(t, time): + event = {} + deq = DequeEventQueue(maxlen=10) + + ret = deq.append(event) + + # append sets the time the event was added to the queue + t.assertEqual(event["rcvtime"], time.time()) + t.assertEqual(ret, None) + + def test_append_pops_and_returns_leftmost_if_full(t): + event_a, event_b = {"name": "event_a"}, {"name": "event_b"} + 
deq = DequeEventQueue(maxlen=1) + + deq.append(event_a) + ret = deq.append(event_b) + + t.assertIn(event_b, deq) + t.assertNotIn(event_a, deq) + t.assertEqual(ret, event_a) + + @patch("{src}.time".format(**PATH)) + def test_popleft(t, time): + t.deq.append(t.event_a) + t.deq.append(t.event_b) + + ret = t.deq.popleft() + + t.assertEqual(ret, t.event_a) + + @patch("{src}.time".format(**PATH)) + def test_extendleft(t, time): + """WARNING: extendleft does NOT add timestamps, as .append does + is this behavior is intentional? + """ + event_c = {"name": "event_c"} + t.deq.append(event_c) + t.assertEqual(list(t.deq), [event_c]) + events = [t.event_a, t.event_b] + + ret = t.deq.extendleft(events) + + t.assertEqual(ret, []) + t.assertEqual(list(t.deq), [t.event_a, t.event_b, event_c]) + """ + # to validate all events get timestamps + t.assertEqual( + list(t.deq), + [{'name': 'event_a', 'rcvtime': time.time.return_value}, + {'name': 'event_b', 'rcvtime': time.time.return_value}, + {'name': 'event_c', 'rcvtime': time.time.return_value}, + ] + """ + + def test_extendleft_returns_events_if_falsey(t): + ret = t.deq.extendleft(False) + t.assertEqual(ret, False) + ret = t.deq.extendleft([]) + t.assertEqual(ret, []) + ret = t.deq.extendleft(0) + t.assertEqual(ret, 0) + + def test_extendleft_returns_extra_events_if_nearly_full(t): + t.deq.maxlen = 3 + t.deq.extendleft([t.event_a, t.event_b]) + event_c, event_d = {"name": "event_c"}, {"name": "event_d"} + events = [event_c, event_d] + + ret = t.deq.extendleft(events) + + t.assertEqual(list(t.deq), [event_d, t.event_a, t.event_b]) + t.assertEqual(ret, [event_c]) + + def test___len__(t): + ret = len(t.deq) + t.assertEqual(ret, 0) + t.deq.extendleft([t.event_a, t.event_b]) + t.assertEqual(len(t.deq), 2) + + def test___iter__(t): + t.deq.extendleft([t.event_a, t.event_b]) + ret = [event for event in t.deq] + t.assertEqual(ret, [t.event_a, t.event_b]) diff --git a/Products/ZenHub/events/queue/tests/test_fingerprint.py b/Products/ZenHub/events/queue/tests/test_fingerprint.py new file mode 100644 index 0000000000..f22ccaa8b2 --- /dev/null +++ b/Products/ZenHub/events/queue/tests/test_fingerprint.py @@ -0,0 +1,55 @@ +from unittest import TestCase + +from zope.interface.verify import verifyObject + +from ..fingerprint import ( + DefaultFingerprintGenerator, + ICollectorEventFingerprintGenerator, + sha1, +) + + +class DefaultFingerprintGeneratorTest(TestCase): + def test_init(t): + fingerprint_generator = DefaultFingerprintGenerator() + + # the class Implements the Interface + t.assertTrue( + ICollectorEventFingerprintGenerator.implementedBy( + DefaultFingerprintGenerator + ) + ) + # the object provides the interface + t.assertTrue( + ICollectorEventFingerprintGenerator.providedBy( + fingerprint_generator + ) + ) + # Verify the object implments the interface properly + verifyObject( + ICollectorEventFingerprintGenerator, fingerprint_generator + ) + + def test_generate(t): + """Takes an event, chews it up and spits out a sha1 hash + without an intermediate function that returns its internal fields list + we have to duplicate the entire function in test. + REFACTOR: split this up so we can test the fields list generator + and sha generator seperately. 
+ Any method of generating the hash from the dict should work so long + as it's the same hash for the event with the _IGNORE_FIELDS stripped off + """ + event = {"k%s" % i: "v%s" % i for i in range(3)} + fields = [] + for k, v in sorted(event.iteritems()): + fields.extend((k, v)) + expected = sha1("|".join(fields)).hexdigest() + + # any keys listed in _IGNORE_FIELDS are not hashed + for key in DefaultFingerprintGenerator._IGNORE_FIELDS: + event[key] = "IGNORE ME!" + + fingerprint_generator = DefaultFingerprintGenerator() + out = fingerprint_generator.generate(event) + + t.assertEqual(out, expected) diff --git a/Products/ZenHub/events/queue/tests/test_manager.py b/Products/ZenHub/events/queue/tests/test_manager.py new file mode 100644 index 0000000000..394bb1a4e8 --- /dev/null +++ b/Products/ZenHub/events/queue/tests/test_manager.py @@ -0,0 +1,328 @@ +import collections + +from unittest import TestCase +from mock import MagicMock, Mock, create_autospec, call + +# Breaks Test Isolation. Products/ZenHub/metricpublisher/utils.py:15 +# ImportError: No module named eventlet +from Products.ZenHub.PBDaemon import Clear, defer +from ..deduping import DeDupingEventQueue +from ..manager import EventQueueManager, TRANSFORM_DROP, TRANSFORM_STOP + 
PATH = {"src": "Products.ZenHub.PBDaemon"} + + +class EventQueueManagerTest(TestCase): + def setUp(t): + options = Mock( + name="options", + spec_set=[ + "maxqueuelen", + "deduplicate_events", + "allowduplicateclears", + "duplicateclearinterval", + "eventflushchunksize", + ], + ) + options.deduplicate_events = True + log = Mock(name="logger.log", spec_set=["debug", "warn"]) + + t.eqm = EventQueueManager(options, log) + t.eqm._initQueues() + + def test_initQueues(t): + options = Mock( + name="options", spec_set=["maxqueuelen", "deduplicate_events"] + ) + options.deduplicate_events = True + log = Mock(name="logger.log", spec_set=[]) + + eqm = EventQueueManager(options, log) + eqm._initQueues() + + t.assertIsInstance(eqm.event_queue, DeDupingEventQueue) + t.assertEqual(eqm.event_queue.maxlen, options.maxqueuelen) + t.assertIsInstance(eqm.perf_event_queue, DeDupingEventQueue) + t.assertEqual(eqm.perf_event_queue.maxlen, options.maxqueuelen) + t.assertIsInstance(eqm.heartbeat_event_queue, collections.deque) + t.assertEqual(eqm.heartbeat_event_queue.maxlen, 1) + + def test_transformEvent(t): + """a transformer mutates and returns an event""" + + def transform(event): + event["transformed"] = True + return event + + transformer = Mock(name="transformer", spec_set=["transform"]) + transformer.transform.side_effect = transform + t.eqm.transformers = [transformer] + + event = {} + ret = t.eqm._transformEvent(event) + + t.assertEqual(ret, event) + t.assertEqual(event, {"transformed": True}) + + def test_transformEvent_drop(t): + """if a transformer returns TRANSFORM_DROP + stop running the event through transformers, and return None + """ + + def transform_drop(event): + return TRANSFORM_DROP + + def transform_bomb(event): + 0 / 0 + + transformer = Mock(name="transformer", spec_set=["transform"]) + transformer.transform.side_effect = transform_drop + transformer_2 = Mock(name="transformer", spec_set=["transform"]) + transformer_2.transform.side_effect = transform_bomb + + t.eqm.transformers = [transformer, transformer_2] + + event = {} + ret = t.eqm._transformEvent(event) + t.assertEqual(ret, None) + + def test_transformEvent_stop(t): + """if a transformer returns TRANSFORM_STOP + stop running the event through transformers, and return the event + """ + + def 
transform_drop(event): + return TRANSFORM_STOP + + def transform_bomb(event): + 0 / 0 + + transformer = Mock(name="transformer", spec_set=["transform"]) + transformer.transform.side_effect = transform_drop + transformer_2 = Mock(name="transformer", spec_set=["transform"]) + transformer_2.transform.side_effect = transform_bomb + + t.eqm.transformers = [transformer, transformer_2] + + event = {} + ret = t.eqm._transformEvent(event) + t.assertIs(ret, event) + + def test_clearFingerprint(t): + event = {k: k + "_v" for k in t.eqm.CLEAR_FINGERPRINT_FIELDS} + + ret = t.eqm._clearFingerprint(event) + + t.assertEqual( + ret, ("device_v", "component_v", "eventKey_v", "eventClass_v") + ) + + def test__removeDiscardedEventFromClearState(t): + """if the event's fingerprint is in clear_events_count + decrement its value + """ + t.eqm.options.allowduplicateclears = False + t.eqm.options.duplicateclearinterval = 0 + + discarded = {"severity": Clear} + clear_fingerprint = t.eqm._clearFingerprint(discarded) + t.eqm.clear_events_count[clear_fingerprint] = 3 + + t.eqm._removeDiscardedEventFromClearState(discarded) + + t.assertEqual(t.eqm.clear_events_count[clear_fingerprint], 2) + + def test__addEvent(t): + """remove the event from clear_events_count + and append it to the queue + """ + t.eqm.options.allowduplicateclears = False + + queue = MagicMock(name="queue", spec_set=["append", "__len__"]) + event = {} + clear_fingerprint = t.eqm._clearFingerprint(event) + t.eqm.clear_events_count = {clear_fingerprint: 3} + + t.eqm._addEvent(queue, event) + + t.assertNotIn(clear_fingerprint, t.eqm.clear_events_count) + queue.append.assert_called_with(event) + + def test__addEvent_status_clear(t): + t.eqm.options.allowduplicateclears = False + t.eqm.options.duplicateclearinterval = 0 + + queue = MagicMock(name="queue", spec_set=["append", "__len__"]) + event = {"severity": Clear} + clear_fingerprint = t.eqm._clearFingerprint(event) + + t.eqm._addEvent(queue, event) + + t.assertEqual(t.eqm.clear_events_count[clear_fingerprint], 1) + queue.append.assert_called_with(event) + + def test__addEvent_drop_duplicate_clear_events(t): + t.eqm.options.allowduplicateclears = False + clear_count = 1 + + queue = MagicMock(name="queue", spec_set=["append", "__len__"]) + event = {"severity": Clear} + clear_fingerprint = t.eqm._clearFingerprint(event) + t.eqm.clear_events_count = {clear_fingerprint: clear_count} + + t.eqm._addEvent(queue, event) + + # non-clear events are not added to the clear_events_count dict + t.assertNotIn(t.eqm.clear_events_count, clear_fingerprint) + + queue.append.assert_not_called() + + def test__addEvent_drop_duplicate_clear_events_interval(t): + t.eqm.options.allowduplicateclears = False + clear_count = 3 + t.eqm.options.duplicateclearinterval = clear_count + + queue = MagicMock(name="queue", spec_set=["append", "__len__"]) + event = {"severity": Clear} + clear_fingerprint = t.eqm._clearFingerprint(event) + t.eqm.clear_events_count = {clear_fingerprint: clear_count} + + t.eqm._addEvent(queue, event) + + # non-clear events are not added to the clear_events_count dict + t.assertNotIn(t.eqm.clear_events_count, clear_fingerprint) + queue.append.assert_not_called() + + def test__addEvent_counts_discarded_events(t): + queue = MagicMock(name="queue", spec_set=["append", "__len__"]) + event = {} + discarded_event = {"name": "event"} + queue.append.return_value = discarded_event + + t.eqm._removeDiscardedEventFromClearState = create_autospec( + t.eqm._removeDiscardedEventFromClearState, + ) + t.eqm._discardedEvents.mark 
= create_autospec( + t.eqm._discardedEvents.mark + ) + + t.eqm._addEvent(queue, event) + + t.eqm._removeDiscardedEventFromClearState.assert_called_with( + discarded_event + ) + t.eqm._discardedEvents.mark.assert_called_with() + t.assertEqual(t.eqm.discarded_events, 1) + + def test_addEvent(t): + t.eqm._addEvent = create_autospec(t.eqm._addEvent) + event = {} + t.eqm.addEvent(event) + + t.eqm._addEvent.assert_called_with(t.eqm.event_queue, event) + + def test_addPerformanceEvent(t): + t.eqm._addEvent = create_autospec(t.eqm._addEvent) + event = {} + t.eqm.addPerformanceEvent(event) + + t.eqm._addEvent.assert_called_with(t.eqm.perf_event_queue, event) + + def test_addHeartbeatEvent(t): + heartbeat_event_queue = Mock(spec_set=t.eqm.heartbeat_event_queue) + t.eqm.heartbeat_event_queue = heartbeat_event_queue + heartbeat_event = {} + t.eqm.addHeartbeatEvent(heartbeat_event) + + heartbeat_event_queue.append.assert_called_with(heartbeat_event) + + def test_sendEvents(t): + """chunks events from EventManager's queues + yields them to the event_sender_fn + and returns a deffered with a result of events sent count + """ + t.eqm.options.eventflushchunksize = 3 + t.eqm.options.maxqueuelen = 5 + t.eqm._initQueues() + heartbeat_events = [{"heartbeat": i} for i in range(2)] + perf_events = [{"perf_event": i} for i in range(2)] + events = [{"event": i} for i in range(2)] + + t.eqm.heartbeat_event_queue.extendleft(heartbeat_events) + # heartbeat_event_queue set to static maxlen=1 + t.assertEqual(len(t.eqm.heartbeat_event_queue), 1) + t.eqm.perf_event_queue.extendleft(perf_events) + t.eqm.event_queue.extendleft(events) + + event_sender_fn = Mock(name="event_sender_fn") + + ret = t.eqm.sendEvents(event_sender_fn) + + # Priority: heartbeat, perf, event + event_sender_fn.assert_has_calls( + [ + call([heartbeat_events[1], perf_events[0], perf_events[1]]), + call([events[0], events[1]]), + ] + ) + t.assertIsInstance(ret, defer.Deferred) + t.assertEqual(ret.result, 5) + + def test_sendEvents_exception_handling(t): + """In case of exception, places events back in the queue, + and remove clear state for any discarded events + """ + t.eqm.options.eventflushchunksize = 3 + t.eqm.options.maxqueuelen = 5 + t.eqm._initQueues() + heartbeat_events = [{"heartbeat": i} for i in range(2)] + perf_events = [{"perf_event": i} for i in range(2)] + events = [{"event": i} for i in range(2)] + + t.eqm.heartbeat_event_queue.extendleft(heartbeat_events) + t.eqm.perf_event_queue.extendleft(perf_events) + t.eqm.event_queue.extendleft(events) + + def event_sender_fn(args): + raise Exception("event_sender_fn failed") + + ret = t.eqm.sendEvents(event_sender_fn) + # validate Exception was raised + t.assertEqual(ret.result.check(Exception), Exception) + # quash the unhandled error in defferd exception + ret.addErrback(Mock()) + + # Heartbeat events get dropped + t.assertNotIn(heartbeat_events[1], t.eqm.heartbeat_event_queue) + # events and perf_events are returned to the queues + t.assertIn(perf_events[0], t.eqm.perf_event_queue) + t.assertIn(events[0], t.eqm.event_queue) + + def test_sendEvents_exception_removes_clear_state_for_discarded(t): + t.eqm.options.eventflushchunksize = 3 + t.eqm.options.maxqueuelen = 2 + t.eqm._initQueues() + events = [{"event": i} for i in range(2)] + + t.eqm.event_queue.extendleft(events) + + def send(args): + t.eqm.event_queue.append({"new_event": 0}) + raise Exception("event_sender_fn failed") + + event_sender_fn = Mock(name="event_sender_fn", side_effect=send) + + t.eqm._removeDiscardedEventFromClearState = 
create_autospec( + t.eqm._removeDiscardedEventFromClearState, + name="_removeDiscardedEventFromClearState", + ) + + ret = t.eqm.sendEvents(event_sender_fn) + # validate Exception was raised + t.assertEqual(ret.result.check(Exception), Exception) + # quash the unhandled error in differd exception + ret.addErrback(Mock()) + + event_sender_fn.assert_called_with([events[0], events[1]]) + + t.eqm._removeDiscardedEventFromClearState.assert_called_with(events[0]) diff --git a/Products/ZenHub/events/queue/tests/test_misc.py b/Products/ZenHub/events/queue/tests/test_misc.py new file mode 100644 index 0000000000..4a5292a579 --- /dev/null +++ b/Products/ZenHub/events/queue/tests/test_misc.py @@ -0,0 +1,29 @@ +from mock import patch +from unittest import TestCase + +from ..misc import load_utilities + +PATH = {"src": "Products.ZenHub.events.queue.misc"} + + +class load_utilities_Test(TestCase): + @patch("{src}.getUtilitiesFor".format(**PATH), autospec=True) + def test_load_utilities(t, getUtilitiesFor): + ICollectorEventTransformer = "some transform function" + + def func1(): + pass + + def func2(): + pass + + func1.weight = 100 + func2.weight = 50 + getUtilitiesFor.return_value = (("func1", func1), ("func2", func2)) + + ret = load_utilities(ICollectorEventTransformer) + + getUtilitiesFor.assert_called_with(ICollectorEventTransformer) + # NOTE: lower weight comes first in the sorted list + # Is this intentional? + t.assertEqual(ret, [func2, func1]) diff --git a/Products/ZenHub/events/tests/__init__.py b/Products/ZenHub/events/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/Products/ZenHub/hub.zcml b/Products/ZenHub/hub.zcml index f58deafd0e..843cc45884 100644 --- a/Products/ZenHub/hub.zcml +++ b/Products/ZenHub/hub.zcml @@ -9,60 +9,37 @@ # ############################################################################## --> - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + diff --git a/Products/ZenHub/interfaces.py b/Products/ZenHub/interfaces.py index 5e343f54d1..e975401e4b 100644 --- a/Products/ZenHub/interfaces.py +++ b/Products/ZenHub/interfaces.py @@ -7,13 +7,23 @@ # ############################################################################## +from enum import IntEnum from zope.component.interfaces import Interface, IObjectEvent from zope.interface import Attribute -# "Enum" for return values for IInvalidationFilters. -FILTER_EXCLUDE = 0 -FILTER_INCLUDE = 1 -FILTER_CONTINUE = 2 + +class InvalidationFilterResult(IntEnum): + """IInvalidationFilter implementations return one these values.""" + + Exclude = 0 + Include = 1 + Continue = 2 + + +# These names exist for backward compatibility. +FILTER_EXCLUDE = InvalidationFilterResult.Exclude +FILTER_INCLUDE = InvalidationFilterResult.Include +FILTER_CONTINUE = InvalidationFilterResult.Continue class IInvalidationEvent(IObjectEvent): @@ -188,6 +198,21 @@ def generate(event): """ +class IEventService(Interface): + """ + A service that allows the sending of an event. 
+ """ + + def sendEvents(events): + pass + + def sendEvent(event, **kw): + pass + + def sendHeartbeat(event): + pass + + TRANSFORM_CONTINUE = 0 TRANSFORM_STOP = 1 TRANSFORM_DROP = 2 diff --git a/Products/ZenHub/invalidationfilter.py b/Products/ZenHub/invalidationfilter.py index 26f4b552a9..0d5cb48cc1 100644 --- a/Products/ZenHub/invalidationfilter.py +++ b/Products/ZenHub/invalidationfilter.py @@ -13,7 +13,7 @@ from cStringIO import StringIO from hashlib import md5 -from zope.interface import implements +from zope.interface import implementer from Products.ZenModel.DeviceClass import DeviceClass from Products.ZenModel.GraphDefinition import GraphDefinition @@ -29,18 +29,15 @@ from .interfaces import IInvalidationFilter, FILTER_EXCLUDE, FILTER_CONTINUE -log = logging.getLogger("zen.InvalidationFilter") +log = logging.getLogger("zen.{}".format(__name__.split(".")[-1].lower())) +@implementer(IInvalidationFilter) class IgnorableClassesFilter(object): - """ - This filter specifies which classes we want to ignore the - invalidations on. - """ - - implements(IInvalidationFilter) + """Ignore invalidations on certain classes.""" CLASSES_TO_IGNORE = ( + DeviceClass, IpAddress, IpNetwork, GraphDefinition, @@ -60,16 +57,36 @@ def include(self, obj): return FILTER_CONTINUE +_iszorcustprop = re.compile("[zc][A-Z]").match + + +def _getZorCProperties(organizer): + for zId in sorted(organizer.zenPropertyIds(pfilt=_iszorcustprop)): + try: + if organizer.zenPropIsPassword(zId): + propertyString = organizer.getProperty(zId, "") + else: + propertyString = organizer.zenPropertyString(zId) + yield zId, propertyString + except AttributeError: + # ZEN-3666: If an attribute error is raised on a zProperty + # assume it was produced by a zenpack + # install whose daemons haven't been restarted and continue + # excluding the offending property. + log.debug("Excluding '%s' property", zId) + + +@implementer(IInvalidationFilter) class BaseOrganizerFilter(object): """ - Base invalidation filter for organizers. Calculates a checksum for - the organizer based on its sorted z/c properties. - """ + Base invalidation filter for organizers. - implements(IInvalidationFilter) + The default implementation will reject organizers that do not have + updated calculated checksum values. The checksum is calculated using + accumulation of each 'z' and 'c' property associated with organizer. + """ weight = 10 - iszorcustprop = re.compile("[zc][A-Z]").match def __init__(self, types): self._types = types @@ -89,24 +106,9 @@ def initialize(self, context): log.warn("Unable to retrieve object: %s", brain.getPath()) self.checksum_map = results - def getZorCProperties(self, organizer): - for zId in sorted(organizer.zenPropertyIds(pfilt=self.iszorcustprop)): - try: - if organizer.zenPropIsPassword(zId): - propertyString = organizer.getProperty(zId, "") - else: - propertyString = organizer.zenPropertyString(zId) - yield zId, propertyString - except AttributeError: - # ZEN-3666: If an attribute error is raised on a zProperty - # assume it was produced by a zenpack - # install whose daemons haven't been restarted and continue - # excluding the offending property. 
- log.debug("Excluding '%s' property", zId) - def generateChecksum(self, organizer, md5_checksum): # Checksum all zProperties and custom properties - for zId, propertyString in self.getZorCProperties(organizer): + for zId, propertyString in _getZorCProperties(organizer): md5_checksum.update("%s|%s" % (zId, propertyString)) def organizerChecksum(self, organizer): @@ -135,9 +137,10 @@ def include(self, obj): class DeviceClassInvalidationFilter(BaseOrganizerFilter): """ - Subclass of BaseOrganizerFilter with specific logic for - Device classes. Uses both z/c properties as well as locally - bound RRD templates to create the checksum. + Invalidation filter for DeviceClass organizers. + + Uses both 'z' and 'c' properties as well as locally bound RRD templates + to create the checksum. """ def __init__(self): @@ -167,10 +170,7 @@ def generateChecksum(self, organizer, md5_checksum): class OSProcessOrganizerFilter(BaseOrganizerFilter): - """ - Invalidation filter for OSProcessOrganizer objects. This filter only - looks at z/c properties defined on the organizer. - """ + """Invalidation filter for OSProcessOrganizer objects.""" def __init__(self): super(OSProcessOrganizerFilter, self).__init__((OSProcessOrganizer,)) @@ -181,9 +181,10 @@ def getRoot(self, context): class OSProcessClassFilter(BaseOrganizerFilter): """ - Invalidation filter for OSProcessClass objects. This filter uses - z/c properties as well as local _properties defined on the organizer - to create a checksum. + Invalidation filter for OSProcessClass objects. + + This filter uses 'z' and 'c' properties as well as local _properties + defined on the organizer to create a checksum. """ def __init__(self): diff --git a/Products/ZenHub/invalidationmanager.py b/Products/ZenHub/invalidationmanager.py index 9e57a8dc21..e35eb383ca 100644 --- a/Products/ZenHub/invalidationmanager.py +++ b/Products/ZenHub/invalidationmanager.py @@ -13,7 +13,7 @@ from itertools import chain from functools import wraps -from twisted.internet.defer import inlineCallbacks +from twisted.internet.defer import inlineCallbacks, returnValue from ZODB.POSException import POSKeyError from zope.component import getUtility, getUtilitiesFor, subscribers @@ -36,11 +36,10 @@ ) from .invalidations import INVALIDATIONS_PAUSED -log = logging.getLogger("zen.ZenHub.invalidationmanager") +log = logging.getLogger("zen.zenhub.invalidations") class InvalidationManager(object): - _invalidation_paused_event = { "summary": "Invalidation processing is " "currently paused. 
To resume, set " @@ -58,7 +57,6 @@ class InvalidationManager(object): def __init__( self, dmd, - log, syncdb, poll_invalidations, send_event, @@ -66,7 +64,6 @@ def __init__( ): self.__dmd = dmd self.__syncdb = syncdb - self.log = log self.__poll_invalidations = poll_invalidations self.__send_event = send_event self.poll_interval = poll_interval @@ -76,41 +73,48 @@ def __init__( self.totalEvents = 0 self.totalTime = 0 - self.initialize_invalidation_filters() + self._invalidation_filters = self.initialize_invalidation_filters( + self.__dmd + ) self.processor = getUtility(IInvalidationProcessor) - log.debug("got InvalidationProcessor %s", self.processor) app = self.__dmd.getPhysicalRoot() self.invalidation_pipeline = InvalidationPipeline( app, self._invalidation_filters, self._queue ) - def initialize_invalidation_filters(self): - """Get Invalidation Filters, initialize them, - store them in the _invalidation_filters list, and return the list + @staticmethod + def initialize_invalidation_filters(ctx): + """ + Return initialized IInvalidationFilter objects in a list. + + :param ctx: Used to initialize the IInvalidationFilter objects. + :type ctx: DataRoot + :return: Initialized IInvalidationFilter objects + :rtype: List[IInvalidationFilter] """ try: filters = (f for n, f in getUtilitiesFor(IInvalidationFilter)) - self._invalidation_filters = [] + invalidation_filters = [] for fltr in sorted( filters, key=lambda f: getattr(f, "weight", 100) ): - fltr.initialize(self.__dmd) - self._invalidation_filters.append(fltr) - self.log.info( - "Registered %s invalidation filters.", - len(self._invalidation_filters), - ) - self.log.info( - "invalidation filters: %s", self._invalidation_filters + fltr.initialize(ctx) + invalidation_filters.append(fltr) + log.info( + "registered %s invalidation filters.", + len(invalidation_filters), ) - return self._invalidation_filters + log.info("invalidation filters: %s", invalidation_filters) + return invalidation_filters except Exception: log.exception("error in initialize_invalidation_filters") @inlineCallbacks def process_invalidations(self): - """Periodically process database changes. - synchronize with the database, and poll invalidated oids from it, + """ + Periodically process database changes. 
+ + Synchronize with the database, and poll invalidated oids from it, filter the oids, send them to the invalidation_processor @return: None @@ -119,35 +123,38 @@ def process_invalidations(self): now = time() yield self._syncdb() if self._paused(): - return + returnValue(None) oids = self._poll_invalidations() if not oids: - log.debug("no invalidations found: oids=%s", oids) - return + log.debug("no invalidations found") + returnValue(None) for oid in oids: yield self.invalidation_pipeline.run(oid) - self.log.debug("Processed %s raw invalidations", len(oids)) - yield self.processor.processQueue(self._queue) + handled, ignored = yield self.processor.processQueue(self._queue) + log.debug( + "processed invalidations " + "handled-count=%d, ignored-count=%d", + handled, + ignored, + ) self._queue.clear() - except Exception: log.exception("error in process_invalidations") finally: self.totalEvents += 1 self.totalTime += time() - now - log.debug("end process_invalidations") @inlineCallbacks def _syncdb(self): try: - self.log.debug("[processQueue] syncing....") + log.debug("syncing with ZODB ...") yield self.__syncdb() - self.log.debug("[processQueue] synced") + log.debug("synced with ZODB") except Exception: - self.log.warn("Unable to poll invalidations, will try again.") + log.warn("Unable to poll invalidations") def _paused(self): if not self._currently_paused: @@ -175,7 +182,7 @@ def _poll_invalidations(self): log.debug("poll invalidations from dmd.storage") return self.__poll_invalidations() except Exception: - log.exception("error in _poll_invalidations") + log.exception("failed to poll invalidations") @inlineCallbacks def _send_event(self, event): diff --git a/Products/ZenHub/invalidationoid.py b/Products/ZenHub/invalidationoid.py index 0d88b916f9..8a55abdcca 100644 --- a/Products/ZenHub/invalidationoid.py +++ b/Products/ZenHub/invalidationoid.py @@ -9,8 +9,8 @@ import logging -from zope.interface import implements -from zope.component import adapts +from zope.interface import implementer +from zope.component import adapter from Products.ZenRelations.PrimaryPathObjectManager import ( PrimaryPathObjectManager, @@ -18,13 +18,12 @@ from .interfaces import IInvalidationOid - -log = logging.getLogger("zen.InvalidationOid") +log = logging.getLogger("zen.{}".format(__name__.split(".")[-1].lower())) +@adapter(PrimaryPathObjectManager) +@implementer(IInvalidationOid) class DefaultOidTransform(object): - implements(IInvalidationOid) - adapts(PrimaryPathObjectManager) def __init__(self, obj): self._obj = obj @@ -33,8 +32,8 @@ def transformOid(self, oid): return oid +# DeviceOidTransform kept for backward compability with vSphere ZenPack. 
class DeviceOidTransform(object): - implements(IInvalidationOid) def __init__(self, obj): self._obj = obj diff --git a/Products/ZenHub/invalidations.py b/Products/ZenHub/invalidations.py index 45117adb97..16bd8fc854 100644 --- a/Products/ZenHub/invalidations.py +++ b/Products/ZenHub/invalidations.py @@ -9,11 +9,9 @@ import logging -from BTrees.IIBTree import IITreeSet from twisted.internet import defer -from ZODB.utils import u64 from zope.component import adapter, getGlobalSiteManager -from zope.interface import implements, providedBy +from zope.interface import implementer, providedBy from Products.ZenModel.DeviceComponent import DeviceComponent from Products.ZenRelations.PrimaryPathObjectManager import ( @@ -24,50 +22,11 @@ from .interfaces import IInvalidationProcessor, IHubCreatedEvent from .zodb import UpdateEvent, DeletionEvent - -log = logging.getLogger("zen.ZenHub") +log = logging.getLogger("zen.zenhub.invalidations") INVALIDATIONS_PAUSED = "PAUSED" -@defer.inlineCallbacks -def betterObjectEventNotify(event): - """ - This method re-implements zope.component.event.objectEventNotify to give - more time back to the reactor. It is slightly different, but works exactly - the same for our specific use case. - """ - gsm = getGlobalSiteManager() - subscriptions = gsm.adapters.subscriptions( - map(providedBy, (event.object, event)), None - ) - for subscription in subscriptions: - yield giveTimeToReactor(subscription, event.object, event) - - -def handle_oid(dmd, oid): - # Go pull the object out of the database - obj = dmd._p_jar[oid] - # Don't bother with all the catalog stuff; we're depending on primaryAq - # existing anyway, so only deal with it if it actually has primaryAq. - if isinstance(obj, PrimaryPathObjectManager) or isinstance( - obj, DeviceComponent - ): - try: - # Try to get the object - obj = obj.__of__(dmd).primaryAq() - except (AttributeError, KeyError): - # Object has been removed from its primary path (i.e. was - # deleted), so make a DeletionEvent - log.debug("Notifying services that %r has been deleted", obj) - event = DeletionEvent(obj, oid) - else: - # Object was updated, so make an UpdateEvent - log.debug("Notifying services that %r has been updated", obj) - event = UpdateEvent(obj, oid) - # Fire the event for all interested services to pick up - return betterObjectEventNotify(event) - - +@implementer(IInvalidationProcessor) class InvalidationProcessor(object): """ Registered as a global utility. Given a database hook and a list of oids, @@ -75,14 +34,10 @@ class InvalidationProcessor(object): cause collectors to be pushed updates. """ - implements(IInvalidationProcessor) - - _invalidation_queue = None _hub = None _hub_ready = None def __init__(self): - self._invalidation_queue = IITreeSet() self._hub_ready = defer.Deferred() getGlobalSiteManager().registerHandler(self.onHubCreated) @@ -94,26 +49,53 @@ def onHubCreated(self, event): @defer.inlineCallbacks def processQueue(self, oids): yield self._hub_ready - i = 0 - queue = self._invalidation_queue - if self._hub.dmd.pauseHubNotifications: - log.debug("notifications are currently paused") - defer.returnValue(INVALIDATIONS_PAUSED) - for i, oid in enumerate(oids): - ioid = u64(oid) - # Try pushing it into the queue, which is an IITreeSet. - # If it inserted successfully it returns 1, else 0. 
- if queue.insert(ioid): - # Get the deferred that does the notification - d = self._dispatch(self._hub.dmd, oid, ioid, queue) - yield d - defer.returnValue(i) + handled, ignored = 0, 0 + for oid in oids: + try: + obj = self._hub.dmd._p_jar[oid] + # Don't bother with all the catalog stuff; we're depending on + # primaryAq existing anyway, so only deal with it if it + # actually has primaryAq. + if isinstance( + obj, (PrimaryPathObjectManager, DeviceComponent) + ): + handled += 1 + event = _get_event(self._hub.dmd, obj, oid) + yield _notify_event_subscribers(event) + else: + ignored += 1 + except KeyError: + log.warning("object not found oid=%r", oid) + defer.returnValue((handled, ignored)) + + +def _get_event(dmd, obj, oid): + try: + # Try to get the object + obj = obj.__of__(dmd).primaryAq() + except (AttributeError, KeyError): + # Object has been removed from its primary path (i.e. was + # deleted), so make a DeletionEvent + log.debug("notifying services that %r has been deleted", obj) + return DeletionEvent(obj, oid) + else: + # Object was updated, so make an UpdateEvent + log.debug("notifying services that %r has been updated", obj) + return UpdateEvent(obj, oid) + - def _dispatch(self, dmd, oid, ioid, queue): - """ - Send to all the services that care by firing events. - """ +@defer.inlineCallbacks +def _notify_event_subscribers(event): + gsm = getGlobalSiteManager() + subscriptions = gsm.adapters.subscriptions( + map(providedBy, (event.object, event)), None + ) + for subscription in subscriptions: try: - return handle_oid(dmd, oid) - finally: - queue.remove(ioid) + yield giveTimeToReactor(subscription, event.object, event) + except Exception: + log.exception( + "failure in suscriber subscriber=%r event=%r", + subscription, + event, + ) diff --git a/Products/ZenHub/localserver/__init__.py b/Products/ZenHub/localserver/__init__.py new file mode 100644 index 0000000000..e092f23b1f --- /dev/null +++ b/Products/ZenHub/localserver/__init__.py @@ -0,0 +1,12 @@ +from .errors import ErrorResponse, NotFound +from .resource import ZenResource +from .server import LocalServer +from .zhstatus import ZenHubStatus + +__all__ = ( + "ErrorResponse", + "LocalServer", + "NotFound", + "ZenHubStatus", + "ZenResource", +) diff --git a/Products/ZenHub/localserver/errors.py b/Products/ZenHub/localserver/errors.py new file mode 100644 index 0000000000..2af3a4ed29 --- /dev/null +++ b/Products/ZenHub/localserver/errors.py @@ -0,0 +1,21 @@ +import json + +from twisted.web.resource import Resource +from twisted.web._responses import NOT_FOUND + + +class ErrorResponse(Resource): + def __init__(self, code, detail): + Resource.__init__(self) + self.code = code + self.detail = detail + + def render(self, request): + request.setResponseCode(self.code) + request.setHeader(b"content-type", b"application/json; charset=utf-8") + return json.dumps({"error": self.code, "message": self.detail}) + + +class NotFound(ErrorResponse): + def __init__(self): + ErrorResponse.__init__(self, NOT_FOUND, "resource not found") diff --git a/Products/ZenHub/localserver/options.py b/Products/ZenHub/localserver/options.py new file mode 100644 index 0000000000..d7db14185a --- /dev/null +++ b/Products/ZenHub/localserver/options.py @@ -0,0 +1,10 @@ + + +def add_options(parser): + parser.add_option( + "--localport", + dest="localport", + type="int", + default=14682, + help="The app responds to localhost HTTP connections on this port", + ) diff --git a/Products/ZenHub/localserver/resource.py b/Products/ZenHub/localserver/resource.py new file 
mode 100644 index 0000000000..c40ab06164 --- /dev/null +++ b/Products/ZenHub/localserver/resource.py @@ -0,0 +1,27 @@ +import logging + +from twisted.web.resource import Resource +from twisted.web._responses import INTERNAL_SERVER_ERROR + +from .errors import ErrorResponse, NotFound + + +class ZenResource(Resource): + def __init__(self): + Resource.__init__(self) + name = self.__class__.__name__.lower() + self.log = logging.getLogger("zen.localserver.%s" % (name,)) + + def getChild(self, path, request): + return NotFound() + + def render(self, request): + try: + response = Resource.render(self, request) + if isinstance(response, Resource): + return response.render(request) + return response + except Exception: + return ErrorResponse( + INTERNAL_SERVER_ERROR, "unexpected problem" + ).render(request) diff --git a/Products/ZenHub/localserver/server.py b/Products/ZenHub/localserver/server.py new file mode 100644 index 0000000000..9d223d9702 --- /dev/null +++ b/Products/ZenHub/localserver/server.py @@ -0,0 +1,48 @@ +import logging + +from twisted.web.server import Site + +from .resource import ZenResource +from .options import add_options + + +class LocalServer(object): + """ + Server class to listen to local connections. + """ + + buildOptions = staticmethod(add_options) + + def __init__(self, reactor, endpoint): + self.__reactor = reactor + self.__endpoint = endpoint + + root = ZenResource() + self.__site = Site(root) + + self.__listener = None + self.__log = logging.getLogger("zen.localserver") + + def add_resource(self, name, resource): + self.__site.resource.putChild(name, resource) + + def start(self): + """Start listening.""" + d = self.__endpoint.listen(self.__site) + d.addCallbacks(self._success, self._failure) + + def stop(self): + if self._listener: + self._listener.stopListening() + + def _success(self, listener): + self.__log.info("opened localhost port %d", self.__endpoint._port) + self._listener = listener + + def _failure(self, error): + self.__log.error( + "failed to open local port port=%s error=%r", + self.__endpoint._port, + error, + ) + self.__reactor.stop() diff --git a/Products/ZenHub/localserver/zhstatus.py b/Products/ZenHub/localserver/zhstatus.py new file mode 100644 index 0000000000..7fe15c2472 --- /dev/null +++ b/Products/ZenHub/localserver/zhstatus.py @@ -0,0 +1,22 @@ +from twisted.web._responses import INTERNAL_SERVER_ERROR + +from .errors import ErrorResponse +from .resource import ZenResource + + +class ZenHubStatus(ZenResource): + def __init__(self, statusgetter): + ZenResource.__init__(self) + self._getstatus = statusgetter + + def render_GET(self, request): + try: + request.responseHeaders.addRawHeader( + b"content-type", b"text/plain; charset=utf-8" + ) + return self._getstatus() + except Exception: + self.log.exception("failed to get ZenHub connection status") + return ErrorResponse( + INTERNAL_SERVER_ERROR, "zenhub status unavailable" + ) diff --git a/Products/ZenHub/metricpublisher/publisher.py b/Products/ZenHub/metricpublisher/publisher.py index 8df570a6bd..0d90b6a11b 100644 --- a/Products/ZenHub/metricpublisher/publisher.py +++ b/Products/ZenHub/metricpublisher/publisher.py @@ -21,7 +21,7 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IBodyProducer from txredis import RedisClientFactory -from zope.interface import implements +from zope.interface import implementer from Products.ZenUtils.MetricServiceRequest import getPool @@ -447,10 +447,10 @@ def _put(self, scheduled): return self._make_request() 
+@implementer(IBodyProducer) class StringProducer(object): - implements(IBodyProducer) """ - Implements twisted interface for writing a string to HTTP output stream + Implements twisted interface for writing a string to HTTP output stream. """ def __init__(self, postBody): diff --git a/Products/ZenHub/metricpublisher/utils.py b/Products/ZenHub/metricpublisher/utils.py index 3356a72e47..93d41a5a85 100644 --- a/Products/ZenHub/metricpublisher/utils.py +++ b/Products/ZenHub/metricpublisher/utils.py @@ -30,7 +30,7 @@ def inner(*args, **kwargs): return f(*args, **kwargs) except exception: failures += 1 - slots = ((2**failures) - 1) / 2.0 + slots = ((2 ** failures) - 1) / 2.0 mdelay = min(max(slots * mdelay, mdelay), maxdelay) sleepfunc(mdelay) diff --git a/Products/ZenHub/pinger.py b/Products/ZenHub/pinger.py new file mode 100644 index 0000000000..d98c2d871f --- /dev/null +++ b/Products/ZenHub/pinger.py @@ -0,0 +1,55 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from twisted.internet import defer, task + +from .errors import HubDown + + +class PingZenHub(object): + """Simple task to ping ZenHub.""" + + def __init__(self, zenhub, interval=60): + """ + Initialize a PingZenHub instance. + + @type zenhub: ZenHubClient + @param interval: The number seconds between each ping. + @type interval: float + """ + self._zenhub = zenhub + self._interval = interval + self._loop = self._loopd = None + self._log = logging.getLogger("zen.zenhub.ping") + + def start(self): + self._loop = task.LoopingCall(self) + self._loopd = self._loop.start(self._interval, now=False) + + def stop(self): + if self._loop is None: + return + self._loop.stop() + self._loop = self._loopd = None + + @defer.inlineCallbacks + def __call__(self): + # type: () -> defer.Deferred + """Ping zenhub""" + try: + response = yield self._zenhub.ping() + self._log.debug("pinged zenhub: %s", response) + except HubDown: + self._log.warning("no connection to zenhub") + except Exception as ex: + self._log.error("ping failed: %s", ex) diff --git a/Products/ZenHub/server/avatar.py b/Products/ZenHub/server/avatar.py index fa777751a8..e3010caa00 100644 --- a/Products/ZenHub/server/avatar.py +++ b/Products/ZenHub/server/avatar.py @@ -12,12 +12,12 @@ import logging import os -from uuid import uuid4 from twisted.spread import pb -from ..PBDaemon import RemoteBadMonitor +from ..errors import RemoteBadMonitor from .exceptions import UnknownServiceError from .utils import getLogger +from .worker import Worker class HubAvatar(pb.Avatar): @@ -77,48 +77,78 @@ def perspective_getService( service.addListener(listener, options) return service - def perspective_reportingForWork(self, worker, workerId, worklistId): + def perspective_reportForWork(self, remote, name, worklistId): """Allow a worker to register for work. 
- :param worker: Reference to zenhubworker - :type worker: twisted.spread.pb.RemoteReference - :param int workerId: The worker's identifier + :param workerref: Reference to zenhubworker + :type workerref: twisted.spread.pb.RemoteReference + :param str name: The name of the worker :param str worklistId: The worker will work jobs from this worklist :rtype: None """ - worker.workerId = workerId - worker.sessionId = uuid4() - pool = self.__pools.get(worklistId) - if pool is None: - self.__log.error( - "Worker asked to work unknown worklist " - "worker=%s worklist=%s", - workerId, - worklistId, - ) - raise pb.Error("No such worklist: %s" % worklistId) - worker.queue_name = worklistId + pool = self._get_pool(worklistId, name) + worker = Worker(name=name, remote=remote) try: pool.add(worker) + pool.ready(worker) except Exception as ex: - self.__log.exception("Failed to add worker worker=%s", workerId) + self.__log.exception( + "failed to add worker worker=%s worklist=%s", name, worklistId + ) raise pb.Error( "Internal ZenHub error: %s: %s" % (ex.__class__, ex), ) self.__log.info( - "Worker ready to work worker=%s session=%s worklist=%s", - workerId, - worker.sessionId.hex, - worklistId, + "registered worker worker=%s worklist=%s", name, worklistId + ) + remote.notifyOnDisconnect( + lambda ref, n=name, q=worklistId: self._remove_worker(ref, n, q) ) - def removeWorker(worker): - pool = self.__pools.get(worker.queue_name) + def perspective_resignFromWork(self, name, worklistId): + """Allow a worker to unregister itself from work. + + :param str name: The name of the worker + :param str worklistId: The worker will work jobs from this worklist + :rtype: None + """ + pool = self._get_pool(worklistId, name) + worker = self._get_worker(pool, name, worklistId) + if worker is not None: pool.remove(worker) + del worker # maybe this works...? self.__log.info( - "Worker disconnected worker=%s session=%s", - worker.workerId, - worker.sessionId.hex, + "unregistered worker worker=%s worklist=%s", name, worklistId + ) + + def _get_pool(self, worklistId, name): + pool = self.__pools.get(worklistId) + if pool is None: + self.__log.error( + "worker asked to resign from unknown worklist " + "worker=%s worklist=%s", + name, + worklistId, + ) + raise pb.Error("No such worklist: %s" % worklistId) + return pool + + def _get_worker(self, pool, name, worklistId): + worker = pool.get(name) + if worker is None: + self.__log.debug( + "unknown worker worker=%s worklist=%s", name, worklistId ) + return worker - worker.notifyOnDisconnect(removeWorker) + def _remove_worker(self, remote, name, worklistId): + # Note that 'remote' is ignored. 
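# ---------------------------------------------------------------------------
# Reviewer sketch (illustrative only, not part of this patch): the new
# registration handshake seen from the zenhubworker side.  perspective_*
# methods on the avatar are reached over Perspective Broker, so
# perspective_reportForWork / perspective_resignFromWork become
# callRemote("reportForWork", ...) / callRemote("resignFromWork", ...) on the
# avatar reference obtained from PBClientFactory.login().  The worker name,
# worklist name, and WorkerListener class below are placeholders; the real
# worker passes its own pb.Referenceable.
from twisted.internet import defer
from twisted.spread import pb


class WorkerListener(pb.Referenceable):
    """Stand-in for the zenhubworker's referenceable object."""


@defer.inlineCallbacks
def register(hub_reference, name="localhost_0", worklist="default"):
    listener = WorkerListener()
    yield hub_reference.callRemote("reportForWork", listener, name, worklist)
    defer.returnValue(listener)


@defer.inlineCallbacks
def resign(hub_reference, name="localhost_0", worklist="default"):
    yield hub_reference.callRemote("resignFromWork", name, worklist)
# ---------------------------------------------------------------------------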
+ pool = self.__pools.get(worklistId) + if pool is None: + return + worker = self._get_worker(pool, name, worklistId) + if worker is not None: + pool.remove(worker) + self.__log.info( + "worker disconnected worker=%s worklist=%s", name, worklistId + ) diff --git a/Products/ZenHub/server/events.py b/Products/ZenHub/server/events.py index 706dc62005..07df019e08 100644 --- a/Products/ZenHub/server/events.py +++ b/Products/ZenHub/server/events.py @@ -9,6 +9,9 @@ from __future__ import absolute_import +import attr + +from attr.validators import instance_of from zope.interface import implementer from ..interfaces import IServiceAddedEvent @@ -17,6 +20,7 @@ IServiceCallStartedEvent, IServiceCallCompletedEvent, ) +from .priority import ServiceCallPriority from .utils import UNSPECIFIED as _UNSPECIFIED @@ -38,83 +42,61 @@ class ReportWorkerStatus(object): """An event to signal zenhubworkers to report their status.""" -class ServiceCallEvent(object): - """Base class for ServiceCall* event classes.""" - - __slots__ = () - - def __init__(self, **kwargs): - for name in self.__slots__: - setattr(self, name, kwargs.pop(name, None)) - # no left-over arguments - assert len(kwargs) == 0, "[%r] invalid arguments" % self - super(ServiceCallEvent, self).__init__() +@attr.s(slots=True, frozen=True) +class _ReceivedData(object): + id = attr.ib(converter=str) + monitor = attr.ib(converter=str) + service = attr.ib(converter=str) + method = attr.ib(converter=str) + args = attr.ib() + kwargs = attr.ib() + timestamp = attr.ib(validator=instance_of(float)) + queue = attr.ib() + priority = attr.ib(validator=instance_of(ServiceCallPriority)) + + +@attr.s(slots=True, frozen=True) +class _StartedData(_ReceivedData): + worker = attr.ib(converter=str) + attempts = attr.ib(converter=int) + + @attempts.validator + def _non_zero(self, attribute, value): + if value < 1: + raise ValueError("attempts must be an integer greater than zero") + + +@attr.s(slots=True, frozen=True) +class _CompletedData(_StartedData): + retry = attr.ib(default=_UNSPECIFIED) + error = attr.ib(default=_UNSPECIFIED) + result = attr.ib(default=_UNSPECIFIED) + + def __attrs_post_init__(self): + unspecified = tuple( + name + for name in ("result", "error", "retry") + if getattr(self, name) is _UNSPECIFIED + ) + if len(unspecified) != 2: + raise TypeError( + "At least one of fields 'result', 'retry', and 'error' " + "must be given an argument" + ) + for name in unspecified: + object.__setattr__(self, name, None) @implementer(IServiceCallReceivedEvent) -class ServiceCallReceived(ServiceCallEvent): +class ServiceCallReceived(_ReceivedData): """ZenHub has accepted a request to execute a method on a service.""" - __slots__ = ( - "id", - "monitor", - "service", - "method", - "args", - "kwargs", - "timestamp", - "queue", - "priority", - ) - @implementer(IServiceCallStartedEvent) -class ServiceCallStarted(ServiceCallEvent): +class ServiceCallStarted(_StartedData): """ZenHub has started executing a method on a service.""" - __slots__ = ServiceCallReceived.__slots__ + ("worker", "attempts") - - def __init__(self, **kwargs): - assert kwargs.get("attempts") is not None, "attempts is unspecified" - assert kwargs["attempts"] > 0, "attempts is less than 1" - super(ServiceCallStarted, self).__init__(**kwargs) - @implementer(IServiceCallCompletedEvent) -class ServiceCallCompleted(ServiceCallEvent): +class ServiceCallCompleted(_CompletedData): """ZenHub has completed executing a method on a service.""" - - __slots__ = ServiceCallStarted.__slots__ + ("retry", "error", 
"result") - - def __init__(self, **kwargs): - assert kwargs.get("attempts") is not None, "attempts is unspecified" - assert kwargs["attempts"] > 0, "attempts is less than 1" - error = kwargs.get("error", _UNSPECIFIED) - retry = kwargs.get("retry", _UNSPECIFIED) - result = kwargs.get("result", _UNSPECIFIED) - assert any( - ( - all( - ( - (result is not _UNSPECIFIED), - (error is _UNSPECIFIED), - (retry is _UNSPECIFIED), - ) - ), - all( - ( - (result is _UNSPECIFIED), - (error is not _UNSPECIFIED), - (retry is _UNSPECIFIED), - ) - ), - all( - ( - (result is _UNSPECIFIED), - (error is _UNSPECIFIED), - (retry is not _UNSPECIFIED), - ) - ), - ) - ), "[completed] Fields result, retry, and error all unspecified" - super(ServiceCallCompleted, self).__init__(**kwargs) diff --git a/Products/ZenHub/server/executors/event.py b/Products/ZenHub/server/executors/event.py index 4ae664fb06..3696a63d29 100644 --- a/Products/ZenHub/server/executors/event.py +++ b/Products/ZenHub/server/executors/event.py @@ -11,6 +11,8 @@ import time +import attr + from twisted.internet import defer from zope.component import getUtility from zope.event import notify @@ -81,7 +83,7 @@ def submit(self, call): ) # Build args for events - ctx = dict(call) + ctx = attr.asdict(call) ctx.update( { "queue": self.__name, diff --git a/Products/ZenHub/server/executors/tests/test_workers.py b/Products/ZenHub/server/executors/tests/test_workers.py index 4ebc1d0592..11e76e5354 100644 --- a/Products/ZenHub/server/executors/tests/test_workers.py +++ b/Products/ZenHub/server/executors/tests/test_workers.py @@ -9,8 +9,14 @@ from __future__ import absolute_import +import time + from unittest import TestCase + +import attr + from mock import ( + ANY, MagicMock, Mock, NonCallableMagicMock, @@ -20,6 +26,7 @@ from twisted.python.failure import Failure from twisted.spread import pb +# from Products.ZenHub.errors import RemoteException from Products.ZenHub.server.config import ModuleObjectConfig from Products.ZenHub.server.service import ServiceCall from Products.ZenHub.server.worklist import ZenHubWorklist @@ -27,19 +34,19 @@ from Products.ZenHub.server.utils import subTest from ..workers import ( - banana, - jelly, RemoteException, - _Running, + Scheduler, + TaskDispatcher, ServiceCallPriority, ServiceCallTask, WorkerPoolExecutor, + _to_internal_error, ) PATH = {"src": "Products.ZenHub.server.executors.workers"} -class WorkerPoolExecutorTest(TestCase): # noqa: D101 +class WorkerPoolExecutorTest(TestCase): def setUp(t): t.getLogger_patcher = patch( "{src}.getLogger".format(**PATH), @@ -48,22 +55,29 @@ def setUp(t): t.getLogger = t.getLogger_patcher.start() t.addCleanup(t.getLogger_patcher.stop) - t._Running_patcher = patch( - "{src}._Running".format(**PATH), + t.loopingCall_patcher = patch( + "{src}.LoopingCall".format(**PATH), autospec=True, ) - t._Running = t._Running_patcher.start() - t.addCleanup(t._Running_patcher.stop) + t.loopingCall = t.loopingCall_patcher.start() + t.addCleanup(t.loopingCall_patcher.stop) + + t.notify_patcher = patch( + "{src}.notify".format(**PATH), + autospec=True, + ) + t.notify = t.notify_patcher.start() + t.addCleanup(t.notify_patcher.stop) t.reactor = Mock(spec=reactor) t.worklist = NonCallableMagicMock(spec=ZenHubWorklist) - t.workers = NonCallableMagicMock(spec=WorkerPool) + t.pool = NonCallableMagicMock(spec=WorkerPool) t.name = "default" t.executor = WorkerPoolExecutor( t.name, t.worklist, - t.workers, + t.pool, ) t.logger = t.getLogger(t.executor) @@ -86,57 +100,53 @@ def test_create(t, _zhwlist, _ps, _mp): 
_zhwlist.assert_called_once_with(_ps.return_value) t.assertIsInstance(result, WorkerPoolExecutor) t.assertEqual(result.name, t.name) - t.assertIs(result._WorkerPoolExecutor__worklist, _zhwlist.return_value) - t.assertIs(result._WorkerPoolExecutor__workers, pool) - - def test_initial_state(self): - self.assertEqual(self.name, self.executor.name) - self.assertEqual(self.workers, self.executor.pool) - - call = Mock(spec=ServiceCall) - handler = Mock() - - dfr = self.executor.submit(call) - dfr.addErrback(handler) - - f = handler.call_args[0][0] - self.assertIsInstance(f, Failure) - self.assertIsInstance(f.value, pb.Error) - self.assertEqual("ZenHub not ready.", str(f.value)) - self._Running.assert_called_once_with( - self.name, - self.worklist, - self.workers, - self.logger, - ) + t.assertIs(result._worklist, _zhwlist.return_value) + t.assertIs(result._pool, pool) def test_create_requires_pool_and_config_args(t): cases = { - "no args": {}, - "missing 'config'": {"pool": Mock()}, - "missing 'pool'": {"config": Mock()}, + "no args": {"config": None, "pool": None}, + "missing 'config'": {"config": None, "pool": Mock()}, + "missing 'pool'": {"config": Mock(), "pool": None}, } for name, params in cases.items(): with subTest(case=name): with t.assertRaises(ValueError): WorkerPoolExecutor.create(t.name, **params) - def test_start(self): - call = Mock(spec=ServiceCall) - running_state = self._Running.return_value + def test_initial_state(t): + t.assertEqual(t.name, t.executor.name) + t.assertEqual(t.pool, t.executor.pool) + t.assertEqual(t.worklist, t.executor.worklist) - self.executor.start(self.reactor) - dfr = self.executor.submit(call) + def test_start(t): + t.executor.start(t.reactor) - self.assertEqual(dfr, running_state.submit.return_value) + t.assertTrue(t.executor.running) + scheduler = t.executor.scheduler + t.assertIsInstance(scheduler, Scheduler) + t.assertIs(scheduler.reactor, t.reactor) + t.assertEqual(scheduler.name, t.name) + t.assertEqual(scheduler.workers, t.pool) + t.assertEqual(scheduler.worklist, t.worklist) def test_stop(t): + t.executor.stop() + t.assertIsNone(t.executor.scheduler) + t.assertFalse(t.executor.running) + + def test_stop_after_start(t): + t.executor.start(t.reactor) + t.executor.stop() + t.assertIsNone(t.executor.scheduler) + t.assertFalse(t.executor.running) + + def test_submit_on_unstarted_executor(t): """ Submit returns a deferred.failure object if the executor is stopped. """ call = Mock(spec=ServiceCall) - t.executor.stop() dfr = t.executor.submit(call) handler = Mock(name="errback handler") dfr.addErrback(handler) # silence 'unhandled error in deffered' @@ -146,745 +156,427 @@ def test_stop(t): t.assertIsInstance(f.value, pb.Error) t.assertEqual("ZenHub not ready.", str(f.value)) + def test_submit_on_running_executor(t): + t.executor.start(t.reactor) -class BaseRunning(object): - """Base for the Running*Test classes. + call = Mock(spec=ServiceCall) - The setUp() method contains common setup code all tests use. 
- """ + dfr = t.executor.submit(call) - def setUp(self): - super(BaseRunning, self).setUp() - self.getLogger_patcher = patch( + t.assertFalse(dfr.called) + t.notify.assert_called_once_with(ANY) + t.worklist.push.assert_called_once_with(ANY, ANY) + + +class SchedulerTest(TestCase): + def setUp(t): + t.getLogger_patcher = patch( "{src}.getLogger".format(**PATH), autospec=True, ) - self.getLogger = self.getLogger_patcher.start() - self.addCleanup(self.getLogger_patcher.stop) + t.getLogger = t.getLogger_patcher.start() + t.addCleanup(t.getLogger_patcher.stop) - self.getUtility_patcher = patch( - "{src}.getUtility".format(**PATH), + t.deferLater_patcher = patch( + "{src}.deferLater".format(**PATH), autospec=True, ) - self.getUtility = self.getUtility_patcher.start() - self.addCleanup(self.getUtility_patcher.stop) + t.deferLater = t.deferLater_patcher.start() + t.addCleanup(t.deferLater_patcher.stop) - self.LoopingCall_patcher = patch( - "{src}.LoopingCall".format(**PATH), + t.taskDispatcher_patcher = patch( + "{src}.TaskDispatcher".format(**PATH), autospec=True, ) - self.LoopingCall = self.LoopingCall_patcher.start() - self.addCleanup(self.LoopingCall_patcher.stop) - - self.max_retries = self.getUtility.return_value.task_max_retries - self.logger = self.getLogger.return_value - self.loop = self.LoopingCall.return_value - self.workers = NonCallableMagicMock(spec=WorkerPool) - self.worklist = NonCallableMagicMock(spec=ZenHubWorklist) - self.name = "default" - self.running = _Running( - self.name, - self.worklist, - self.workers, - self.logger, - ) - self.reactor = Mock(spec=reactor) + t.taskDispatcher = t.taskDispatcher_patcher.start() + t.addCleanup(t.taskDispatcher_patcher.stop) + t.reactor = Mock(spec=reactor) + t.worklist = NonCallableMagicMock(spec=ZenHubWorklist) + t.pool = NonCallableMagicMock(spec=WorkerPool) + t.name = "default" -class RunningTest(BaseRunning, TestCase): - """Test the _Running class.""" + t.sched = Scheduler(t.reactor, t.name, t.worklist, t.pool) + t.logger = t.getLogger(t.sched) - def test_initial_state(self): - self.LoopingCall.assert_called_once_with(self.running.dispatch) - self.assertIs(self.running.log, self.logger) - self.assertIs(self.running.name, self.name) - self.assertIs(self.running.worklist, self.worklist) - self.assertIs(self.running.workers, self.workers) - self.assertIs(self.running.task_max_retries, self.max_retries) - self.assertIs(self.running.loop, self.loop) + def test_initialized_attributes(t): + t.assertIs(t.reactor, t.sched.reactor) + t.assertIs(t.name, t.sched.name) + t.assertIs(t.worklist, t.sched.worklist) + t.assertIs(t.pool, t.sched.workers) - def test_start(self): - self.running.start(self.reactor) - self.assertIs(self.running.reactor, self.reactor) - self.loop.start.assert_called_once_with(0) + def test_nominal_task_success(t): + call = MagicMock(spec=ServiceCall) + worklist_name = "default" + retries = 3 - def test_stop(self): - self.running.stop() - self.loop.stop.assert_called_once_with() + task = ServiceCallTask( + call=call, worklist=worklist_name, max_retries=retries + ) + task.mark_success(True) + t.worklist.pop.return_value = defer.succeed(task) - @patch("{src}.ServiceCallTask".format(**PATH), autospec=True) - def test_submit(self, _ServiceCallTask): - self.running.start(self.reactor) + worker = Mock(spec=["name"]) + t.pool.hire.return_value = defer.succeed(worker) - call = Mock(spec=ServiceCall) - task = _ServiceCallTask.return_value - expected_priority = task.priority - expected_dfr = task.deferred - - dfr = 
self.running.submit(call) - - self.assertIs(expected_dfr, dfr) - _ServiceCallTask.assert_called_once_with(self.name, call) - self.worklist.push.assert_called_once_with(expected_priority, task) - self.reactor.callLater.assert_not_called() - - def test_dispatch_no_tasks(self): - self.running.start(self.reactor) - - task_dfr = defer.Deferred() - self.worklist.pop.return_value = task_dfr - - dfr = self.running.dispatch() - # Cancel the defer after the test completes to avoid leaving - # uncollected garbage. - self.addCleanup(dfr.cancel) - - self.assertIsInstance(dfr, defer.Deferred) - self.worklist.pop.assert_called_once_with() - self.workers.hire.assert_not_called() - self.reactor.callLater.assert_not_called() - - def test_dispatch_no_workers(self): - self.running.start(self.reactor) - - worker_dfr = defer.Deferred() - self.workers.hire.return_value = worker_dfr - - dfr = self.running.dispatch() - # Cancel the defer after the test completes to avoid leaving - # uncollected garbage. - self.addCleanup(dfr.cancel) - - self.assertFalse(worker_dfr.called) - self.assertIsInstance(dfr, defer.Deferred) - self.worklist.pop.assert_called_once_with() - self.workers.hire.assert_called_once_with() - self.reactor.callLater.assert_not_called() - - def test_dispatch_worker_hire_failure(self): - self.running.start(self.reactor) - - self.worklist.__len__.return_value = 1 - self.workers.hire.side_effect = Exception("boom") - - handler = Mock() - dfr = self.running.dispatch() - dfr.addErrback(handler) - # Cancel the defer after the test completes to avoid leaving - # uncollected garbage. - self.addCleanup(dfr.cancel) - - self.assertIsInstance(dfr, defer.Deferred) - self.worklist.pop.assert_called_once_with() - self.workers.hire.assert_called_once_with() - self.logger.exception.assert_called_once_with( - "Unexpected failure worklist=%s", - self.name, - ) + dispatch_deferred = defer.succeed(None) + t.deferLater.return_value = dispatch_deferred - handler.assert_not_called() - self.reactor.callLater.assert_not_called() - self.logger.info.assert_not_called() - self.logger.warn.assert_not_called() - self.logger.error.assert_not_called() - self.workers.layoff.assert_not_called() - self.worklist.pushfront.assert_not_called() - self.worklist.push.assert_not_called() - - def test_dispatch_pop_failure(self): - self.running.start(self.reactor) - - self.worklist.pop.side_effect = Exception("boom") - - handler = Mock() - dfr = self.running.dispatch() - dfr.addErrback(handler) - # Cancel the defer after the test completes to avoid leaving - # uncollected garbage. 
- self.addCleanup(dfr.cancel) - - self.assertIsInstance(dfr, defer.Deferred) - self.worklist.pop.assert_called_once_with() - self.logger.exception.assert_called_once_with( - "Unexpected failure worklist=%s", - self.name, - ) - self.workers.hire.assert_not_called() - self.workers.layoff.assert_not_called() + t.sched() - handler.assert_not_called() - self.logger.info.assert_not_called() - self.logger.warn.assert_not_called() - self.logger.error.assert_not_called() - self.worklist.pushfront.assert_not_called() - self.worklist.push.assert_not_called() + t.taskDispatcher.assert_called_once_with(worker, task) + t.assertFalse(t.worklist.pushfront.called) + t.pool.ready.assert_called_once_with(worker) - def test__log_initial_start(self): - call = Mock(spec=ServiceCall) - task = Mock(spec=ServiceCallTask) - task.call = call - task.received_tm = 10 - task.started_tm = 20 - task.workerId = "default_0" - - self.running._log_initial_start(task) - - self.logger.info.assert_called_once_with( - "Begin task service=%s method=%s id=%s worker=%s waited=%0.2f", - call.service, - call.method, - call.id.hex, - "default_0", - 10, - ) + def test_nominal_task_failure(t): + call = MagicMock(spec=ServiceCall) + worklist_name = "default" + retries = 3 - def test__log_subsequent_starts(self): - call = Mock(spec=ServiceCall) - task = Mock(spec=ServiceCallTask) - task.call = call - task.attempt = 1 - task.completed_tm = 10 - task.started_tm = 30 - task.workerId = "default_0" - - self.running._log_subsequent_starts(task) - - self.logger.info.assert_called_once_with( - "Retry task service=%s method=%s id=%s " - "worker=%s attempt=%s waited=%0.2f", - call.service, - call.method, - call.id.hex, - "default_0", - task.attempt, - 20, + task = ServiceCallTask( + call=call, worklist=worklist_name, max_retries=retries ) + task.mark_failure(RuntimeError("boom")) + t.worklist.pop.return_value = defer.succeed(task) - def test__log_complete(self): - call = Mock(spec=ServiceCall) - task = Mock(spec=ServiceCallTask) - task.call = call - task.error = None - task.received_tm = 10 - task.started_tm = 20 - task.completed_tm = 30 - task.workerId = "default_0" - - self.running._log_completed(task) - - self.logger.info.assert_called_once_with( - "Completed task service=%s method=%s id=%s " - "worker=%s status=%s duration=%0.2f lifetime=%0.2f", - call.service, - call.method, - call.id.hex, - task.workerId, - "success", - 10, - 20, - ) + worker = Mock(spec=["name"]) + t.pool.hire.return_value = defer.succeed(worker) + dispatch_deferred = defer.succeed(None) + t.deferLater.return_value = dispatch_deferred -class RunningHandleMethodsTest(BaseRunning, TestCase): - """Test the _handle_* methods on the _Running class.""" + t.sched() - def setUp(self): - super(RunningHandleMethodsTest, self).setUp() - methods_to_patch = ( - "_log_initial_start", - "_log_subsequent_starts", - "_log_incomplete", - "_log_completed", - ) - self.patchers = [] - self.patches = {} - for method in methods_to_patch: - patcher = patch.object(self.running, method) - self.patches[method] = patcher.start() - self.addCleanup(patcher.stop) - self.patchers.append(patcher) - self.running.start(self.reactor) - - def test__handle_start_first_attempt(self): - task = Mock(spec=["attempt", "started", "call"]) - task.attempt = 0 - workerId = 1 - - self.running._handle_start(task, workerId) - - self.assertEqual(1, task.attempt) - task.started.assert_called_once_with(workerId) - - self.patches["_log_initial_start"].assert_called_once_with(task) - 
self.patches["_log_subsequent_starts"].assert_not_called() - self.patches["_log_incomplete"].assert_not_called() - self.patches["_log_completed"].assert_not_called() - - def test__handle_start_later_attempts(self): - task = Mock(spec=["attempt", "started", "call"]) - task.attempt = 1 - workerId = 1 - - self.running._handle_start(task, workerId) - - self.assertEqual(2, task.attempt) - task.started.assert_called_once_with(workerId) - - self.patches["_log_initial_start"].assert_not_called() - self.patches["_log_subsequent_starts"].assert_called_once_with(task) - self.patches["_log_incomplete"].assert_not_called() - self.patches["_log_completed"].assert_not_called() - - def test__handle_error_with_retries(self): - _handle_failure_patch = patch.object(self.running, "_handle_failure") - _handle_failure = _handle_failure_patch.start() - self.addCleanup(_handle_failure_patch.stop) - - _handle_retry_patch = patch.object(self.running, "_handle_retry") - _handle_retry = _handle_retry_patch.start() - self.addCleanup(_handle_retry_patch.stop) - - task = Mock(spec=["attempt"]) - task.attempt = 1 - self.running.task_max_retries = 3 - error = Exception() - - self.running._handle_error(task, error) - - _handle_failure.assert_not_called() - _handle_retry.assert_called_once_with(task, error) - self.patches["_log_initial_start"].assert_not_called() - self.patches["_log_subsequent_starts"].assert_not_called() - self.patches["_log_incomplete"].assert_not_called() - self.patches["_log_completed"].assert_not_called() - - @patch("{src}.pb.Error".format(**PATH), autospec=True) - def test__handle_error_no_retries(self, _Error): - _handle_failure_patch = patch.object(self.running, "_handle_failure") - _handle_failure = _handle_failure_patch.start() - self.addCleanup(_handle_failure_patch.stop) - - _handle_retry_patch = patch.object(self.running, "_handle_retry") - _handle_retry = _handle_retry_patch.start() - self.addCleanup(_handle_retry_patch.stop) - - task = Mock(spec=["attempt", "call"]) - task.attempt = 3 - self.running.task_max_retries = 3 - error = Exception() - - self.running._handle_error(task, error) - - _handle_failure.assert_called_once_with(task, _Error.return_value) - _handle_retry.assert_not_called() - self.patches["_log_initial_start"].assert_not_called() - self.patches["_log_subsequent_starts"].assert_not_called() - self.patches["_log_incomplete"].assert_not_called() - self.patches["_log_completed"].assert_not_called() - - @patch("{src}.notify".format(**PATH), autospec=True) - def test__handle_retry(self, _notify): - task = Mock(spec=["success", "completed", "workerId", "call"]) - exception = Mock() - - self.running._handle_retry(task, exception) - - task.completed.assert_called_once_with(retry=exception) - _notify.assert_called_once_with(task.completed.return_value) - - self.patches["_log_initial_start"].assert_not_called() - self.patches["_log_subsequent_starts"].assert_not_called() - self.patches["_log_completed"].assert_not_called() - self.patches["_log_incomplete"].assert_called_once_with(task) - - @patch("{src}.notify".format(**PATH), autospec=True) - def test__handle_success(self, _notify): - task = Mock(spec=["success", "completed", "workerId", "call"]) - result = Mock() + t.taskDispatcher.assert_called_once_with(worker, task) + t.assertFalse(t.worklist.pushfront.called) + t.pool.ready.assert_called_once_with(worker) - self.running._handle_success(task, result) + # silence 'Unhandled error in Deferred' + task.deferred.addErrback(lambda x: None) - task.success.assert_called_once_with(result) - 
task.completed.assert_called_once_with(result=result) - _notify.assert_called_once_with(task.completed.return_value) + def test_task_retry(t): + call = MagicMock(spec=ServiceCall) + worklist_name = "default" + retries = 3 + task = ServiceCallTask( + call=call, worklist=worklist_name, max_retries=retries + ) + task.mark_retry() + t.worklist.pop.return_value = defer.succeed(task) - self.patches["_log_initial_start"].assert_not_called() - self.patches["_log_subsequent_starts"].assert_not_called() - self.patches["_log_incomplete"].assert_not_called() - self.patches["_log_completed"].assert_called_once_with(task) + worker = Mock(spec=["name"]) + t.pool.hire.return_value = defer.succeed(worker) - @patch("{src}.notify".format(**PATH), autospec=True) - def test__handle_failure(self, _notify): - task = Mock(spec=["failure", "completed", "workerId", "call"]) - error = Mock() + dispatch_deferred = defer.succeed(None) + t.deferLater.return_value = dispatch_deferred - self.running._handle_failure(task, error) + t.sched() - task.failure.assert_called_once_with(error) - task.completed.assert_called_once_with(error=error) - _notify.assert_called_once_with(task.completed.return_value) + t.worklist.pushfront.assert_called_once_with(task.priority, task) + t.pool.ready.assert_called_once_with(worker) - self.patches["_log_initial_start"].assert_not_called() - self.patches["_log_subsequent_starts"].assert_not_called() - self.patches["_log_incomplete"].assert_not_called() - self.patches["_log_completed"].assert_called_once_with(task) + def test_worklist_pop_error(t): + t.worklist.pop.side_effect = RuntimeError("boom") + t.sched() -class RunningExecuteTest(BaseRunning, TestCase): - """More complex testing of the execute method on the _Running class.""" + t.assertFalse(t.pool.hire.called) + t.assertFalse(t.taskDispatcher.called) + t.assertFalse(t.deferLater.called) + t.assertFalse(t.pool.ready.called) - def setUp(self): - super(RunningExecuteTest, self).setUp() - methods_to_patch = ( - "_handle_start", - "_handle_retry", - "_handle_error", - "_handle_success", - "_handle_failure", + def test_pool_hire_error(t): + call = MagicMock(spec=ServiceCall) + worklist_name = "default" + retries = 3 + task = ServiceCallTask( + call=call, worklist=worklist_name, max_retries=retries ) - self.patchers = [] - self.patches = {} - for method in methods_to_patch: - patcher = patch.object(self.running, method) - self.patches[method] = patcher.start() - self.addCleanup(patcher.stop) - self.patchers.append(patcher) - self.running.start(self.reactor) - - def test_nominal_execute(self): - task = Mock(spec=["call", "retryable"]) - task.retryable = False - worker = Mock(spec=["workerId", "run"]) - expected_result = worker.run.return_value - - handler = Mock() - dfr = self.running.execute(worker, task) - dfr.addErrback(handler) - - handler.assert_not_called() - self.assertIsInstance(dfr, defer.Deferred) - self.worklist.pop.assert_not_called() - worker.run.assert_called_once_with(task.call) - self.reactor.callLater.assert_not_called() - - self.patches["_handle_start"].assert_called_once_with( - task, - worker.workerId, + t.worklist.pop.return_value = defer.succeed(task) + + t.pool.hire.side_effect = RuntimeError("boom") + + t.sched() + + t.assertFalse(t.taskDispatcher.called) + t.assertFalse(t.deferLater.called) + t.assertFalse(t.pool.ready.called) + + def test_taskdispatcher_error(t): + call = MagicMock(spec=ServiceCall) + worklist_name = "default" + retries = 3 + task = ServiceCallTask( + call=call, worklist=worklist_name, 
max_retries=retries ) - self.patches["_handle_success"].assert_called_once_with( - task, - expected_result, + t.worklist.pop.return_value = defer.succeed(task) + + worker = Mock(spec=["name"]) + t.pool.hire.return_value = defer.succeed(worker) + + dispatch_deferred = defer.succeed(None) + t.deferLater.return_value = dispatch_deferred + + t.taskDispatcher.side_effect = RuntimeError("boom") + + t.sched() + + t.assertFalse(t.deferLater.called) + t.worklist.pushfront.assert_called_once_with(task.priority, task) + t.pool.ready.assert_called_once_with(worker) + + +class TaskDispatcherTest(TestCase): + def setUp(t): + t.getLogger_patcher = patch( + "{src}.getLogger".format(**PATH), + autospec=True, ) + t.getLogger = t.getLogger_patcher.start() + t.addCleanup(t.getLogger_patcher.stop) - self.logger.exception.assert_not_called() - self.logger.error.assert_not_called() - self.logger.warn.assert_not_called() - self.worklist.pushfront.assert_not_called() - self.worklist.push.assert_not_called() - self.patches["_handle_failure"].assert_not_called() - self.patches["_handle_error"].assert_not_called() - self.patches["_handle_retry"].assert_not_called() - self.workers.layoff.assert_called_once_with(worker) - - def test_remote_errors(self): - worker = Mock(spec=["workerId", "run"]) - exc = ValueError("boom") - errors = ( - RemoteException("RemoteBoom", None), - pb.RemoteError(ValueError, exc, None), + t.notify_patcher = patch( + "{src}.notify".format(**PATH), + autospec=True, ) + t.notify = t.notify_patcher.start() + t.addCleanup(t.notify_patcher.stop) - for error in errors: - with subTest(error=error): - worker.run.side_effect = error - task = Mock(spec=["call", "retryable", "priority"]) - task.retryable = True - - handler = Mock() - dfr = self.running.execute(worker, task) - dfr.addErrback(handler) - - handler.assert_not_called() - self.assertIsInstance(dfr, defer.Deferred) - worker.run.assert_called_once_with(task.call) - self.patches["_handle_start"].assert_called_once_with( - task, - worker.workerId, - ) - self.patches["_handle_failure"].assert_called_once_with( - task, - error, - ) - self.workers.layoff.assert_called_once_with(worker) - - self.logger.exception.assert_not_called() - self.logger.error.assert_not_called() - self.logger.warn.assert_not_called() - self.logger.info.assert_not_called() - self.worklist.push.assert_not_called() - self.patches["_handle_success"].assert_not_called() - self.patches["_handle_error"].assert_not_called() - self.patches["_handle_retry"].assert_not_called() - - worker.reset_mock() - for patched in self.patches.values(): - patched.reset_mock() - self.logger.reset_mock() - self.reactor.reset_mock() - self.worklist.reset_mock() - self.workers.reset_mock() - - @patch("{src}.pb.Error".format(**PATH), autospec=True) - def test_internal_errors(self, _Error): - worker = Mock(spec=["workerId", "run"]) - errors = ( - pb.ProtocolError(), - banana.BananaError(), - jelly.InsecureJelly(), + t.worker = Mock(spec=["name", "run"]) + + call = MagicMock(spec=ServiceCall) + worklist_name = "default" + retries = 3 + t.task = ServiceCallTask( + call=call, worklist=worklist_name, max_retries=retries ) + moment = time.time() + t.task.received_tm = moment - 5 + t.dispatcher = TaskDispatcher(t.worker, t.task) + + def test_nominal_call_success(t): + result = {"a": 1} + t.worker.run.return_value = defer.succeed(result) + + t.dispatcher() + + t.assertIsNotNone(t.task.started_tm) + t.assertIsNotNone(t.task.completed_tm) + t.assertTrue(t.task.deferred.called) + 
t.assertEqual(t.task.deferred.result, result) + t.assertFalse(t.task.retryable) + + def test_call_with_remoteerror(t): + mesg = "boom" + error = pb.RemoteError(RuntimeError, mesg, MagicMock()) + t.worker.run.return_value = defer.fail(error) + + t.dispatcher() + + t.assertIsNotNone(t.task.started_tm) + t.assertIsNotNone(t.task.completed_tm) + t.assertTrue(t.task.deferred.called) + t.assertIsInstance(t.task.deferred.result, Failure) + t.assertEqual(t.task.deferred.result.getErrorMessage(), mesg) + t.assertFalse(t.task.retryable) + + # silence 'Unhandled error in Deferred' + t.task.deferred.addErrback(lambda x: None) + + def test_call_with_remoteexception(t): + mesg = "boom" + tb = "Traceback" + expected_mesg = "{}:\n{}".format(mesg, tb) + error = RemoteException(mesg, tb) + t.worker.run.return_value = defer.fail(error) + + t.dispatcher() + + t.assertIsNotNone(t.task.started_tm) + t.assertIsNotNone(t.task.completed_tm) + t.assertTrue(t.task.deferred.called) + t.assertIsInstance(t.task.deferred.result, Failure) + t.assertEqual(t.task.deferred.result.getErrorMessage(), expected_mesg) + t.assertFalse(t.task.retryable) + + # silence 'Unhandled error in Deferred' + t.task.deferred.addErrback(lambda x: None) + + def test_call_with_retryable_connectionlost(t): + error = pb.PBConnectionLost() + t.worker.run.return_value = defer.fail(error) - for error in errors: - with subTest(error=error): - worker.run.side_effect = error - - task = Mock(spec=["call", "retryable", "priority"]) - task.retryable = True - - handler = Mock() - dfr = self.running.execute(worker, task) - dfr.addErrback(handler) - - handler.assert_not_called() - self.assertIsInstance(dfr, defer.Deferred) - worker.run.assert_called_once_with(task.call) - self.patches["_handle_start"].assert_called_once_with( - task, - worker.workerId, - ) - self.logger.error.assert_called_once_with( - "(%s) %s service=%s method=%s id=%s worker=%s", - type(error), - error, - task.call.service, - task.call.method, - task.call.id, - worker.workerId, - ) - self.patches["_handle_failure"].assert_called_once_with( - task, - _Error.return_value, - ) - self.workers.layoff.assert_called_once_with(worker) - - self.logger.exception.assert_not_called() - self.logger.warn.assert_not_called() - self.logger.info.assert_not_called() - self.worklist.push.assert_not_called() - self.patches["_handle_success"].assert_not_called() - self.patches["_handle_error"].assert_not_called() - self.patches["_handle_retry"].assert_not_called() - - worker.reset_mock() - for patched in self.patches.values(): - patched.reset_mock() - self.logger.reset_mock() - self.reactor.reset_mock() - self.worklist.reset_mock() - self.workers.reset_mock() - - def test_execute_PBConnectionLost(self): + t.dispatcher() + + t.assertIsNotNone(t.task.started_tm) + t.assertIsNotNone(t.task.completed_tm) + t.assertTrue(t.task.retryable) + + def test_call_with_unretryable_connectionlost(t): error = pb.PBConnectionLost() - worker = Mock(spec=["workerId", "run"]) - worker.run.side_effect = error - - task = Mock(spec=["call", "retryable", "priority"]) - task.retryable = True - - handler = Mock() - dfr = self.running.execute(worker, task) - dfr.addErrback(handler) - - handler.assert_not_called() - self.assertIsInstance(dfr, defer.Deferred) - worker.run.assert_called_once_with(task.call) - self.patches["_handle_start"].assert_called_once_with( - task, - worker.workerId, - ) - self.patches["_handle_retry"].assert_called_once_with(task, error) - self.worklist.pushfront.assert_called_once_with( - task.priority, - task, - 
) - self.workers.layoff.assert_called_once_with(worker) - - self.logger.error.assert_not_called() - self.worklist.push.assert_not_called() - self.patches["_handle_success"].assert_not_called() - self.patches["_handle_failure"].assert_not_called() - self.patches["_handle_error"].assert_not_called() - - def test_execute_unexpected_error(self): - error = Exception() - worker = Mock(spec=["workerId", "run"]) - worker.run.side_effect = error - - task = Mock(spec=["call", "retryable", "attempt", "priority"]) - task.retryable = True - - handler = Mock() - dfr = self.running.execute(worker, task) - dfr.addErrback(handler) - - handler.assert_not_called() - self.assertIsInstance(dfr, defer.Deferred) - worker.run.assert_called_once_with(task.call) - self.patches["_handle_start"].assert_called_once_with( - task, - worker.workerId, - ) - self.patches["_handle_error"].assert_called_once_with( - task, - error, - ) - self.worklist.pushfront.assert_called_once_with( - task.priority, - task, - ) - self.workers.layoff.assert_called_once_with(worker) - self.logger.exception.assert_called_once_with( - "Unexpected failure worklist=%s", "default" + t.worker.run.return_value = defer.fail(error) + t.task.attempt = t.task.max_retries + t.task.completed_tm = time.time() + 1 + + t.dispatcher() + + t.assertIsNotNone(t.task.started_tm) + t.assertIsNotNone(t.task.completed_tm) + t.assertFalse(t.task.retryable) + t.assertTrue(t.task.deferred.called) + t.assertIsInstance(t.task.deferred.result, Failure) + t.assertEqual(t.task.deferred.result.getErrorMessage(), "") + + # silence 'Unhandled error in Deferred' + t.task.deferred.addErrback(lambda x: None) + + def test_call_with_internal_error(t): + error = RuntimeError("boom") + expected_error = _to_internal_error(error) + t.worker.run.return_value = defer.fail(error) + + t.dispatcher() + + t.assertIsNotNone(t.task.started_tm) + t.assertIsNotNone(t.task.completed_tm) + t.assertFalse(t.task.retryable) + t.assertTrue(t.task.deferred.called) + t.assertIsInstance(t.task.deferred.result, Failure) + t.assertEqual( + t.task.deferred.result.getErrorMessage(), str(expected_error) ) - self.logger.error.assert_not_called() - self.logger.warn.assert_not_called() - self.logger.info.assert_not_called() - self.worklist.push.assert_not_called() - self.patches["_handle_success"].assert_not_called() - self.patches["_handle_failure"].assert_not_called() - self.patches["_handle_retry"].assert_not_called() + # silence 'Unhandled error in Deferred' + t.task.deferred.addErrback(lambda x: None) class ServiceCallTaskTest(TestCase): """Test the ServiceCallTask class.""" - def setUp(self): - self.queue = "queue" - self.monitor = "localhost" - self.service = "service" - self.method = "method" - self.call = ServiceCall( - monitor=self.monitor, - service=self.service, - method=self.method, + def setUp(t): + t.worklist = "queue" + t.monitor = "localhost" + t.service = "service" + t.method = "method" + t.call = ServiceCall( + monitor=t.monitor, + service=t.service, + method=t.method, args=[], kwargs={}, ) - self.task = ServiceCallTask(self.queue, self.call) - - def test_expected_attributes(self): - expected_attrs = tuple( - sorted( - ( - "call", - "deferred", - "desc", - "attempt", - "priority", - "received_tm", - "started_tm", - "completed_tm", - "error", - "retryable", - "workerId", - "event_data", - "received", - "started", - "completed", - "failure", - "success", - ) - ) - ) - actual_attrs = tuple( - sorted(n for n in dir(self.task) if not n.startswith("_")) + t.retries = 4 + t.task = ServiceCallTask( + 
worklist=t.worklist, call=t.call, max_retries=t.retries ) - self.assertTupleEqual(expected_attrs, actual_attrs) - def test_failure_attribute(self): - self.assertTrue(callable(self.task.failure)) + def test_worklist_attribute(t): + t.assertEqual(t.task.worklist, t.worklist) + + def test_max_retries_attribute(t): + t.assertEqual(t.task.max_retries, t.retries) + + def test_call_attribute(t): + t.assertIs(t.call, t.task.call) - def test_success_attribute(self): - self.assertTrue(callable(self.task.success)) + def test_deferred_attribute(t): + t.assertIsInstance(t.task.deferred, defer.Deferred) - def test_call_attribute(self): - self.assertIs(self.call, self.task.call) + def test_desc_attribute(t): + expected_desc = "%s:%s.%s" % (t.monitor, t.service, t.method) + t.assertEqual(expected_desc, t.task.desc) - def test_deferred_attribute(self): - self.assertIsInstance(self.task.deferred, defer.Deferred) + def test_initial_attempt_value(t): + t.assertEqual(0, t.task.attempt) - def test_desc_attribute(self): - expected_desc = "%s:%s.%s" % (self.monitor, self.service, self.method) - self.assertEqual(expected_desc, self.task.desc) + def test_priority_attribute(t): + t.assertEqual(t.task.priority, ServiceCallPriority.OTHER) - def test_initial_attempt_value(self): - self.assertEqual(0, self.task.attempt) + def test_default_timestamps(t): + t.assertIsNone(t.task.received_tm) + t.assertIsNone(t.task.started_tm) + t.assertIsNone(t.task.completed_tm) - def test_initial_error_value(self): - self.assertIsNone(self.task.error) + def test_default_worker_name_attribute(t): + t.assertIsNone(t.task.worker_name) + + def test_default_event_data_attribute(t): + expected_event_data = attr.asdict(t.call) + expected_event_data.update( + { + "queue": t.worklist, + "priority": t.task.priority, + } + ) + t.assertDictEqual(t.task.event_data, expected_event_data) + + def test_retryable_initially(t): + t.assertTrue(t.task.retryable) + + def test_retryable_max_reached(t): + t.task.attempt = t.retries + 1 + t.assertFalse(t.task.retryable) + + def test_retryable_deferred_callback(t): + t.task.deferred.callback(None) + t.assertFalse(t.task.retryable) + + def test_retryable_deferred_errback(t): + t.task.deferred.errback(RuntimeError("boom")) + t.assertFalse(t.task.retryable) + + # silence 'Unhandled error in Deferred' + t.task.deferred.addErrback(lambda x: None) @patch("{src}.time".format(**PATH), autospec=True) - def test_received(self, _time): + def test_mark_received(t, _time): expected_tm = _time.time.return_value - self.task.received() - self.assertEqual(expected_tm, self.task.received_tm) + t.task.mark_received() + t.assertEqual(expected_tm, t.task.received_tm) @patch("{src}.time".format(**PATH), autospec=True) - def test_started(self, _time): - self.task.attempt = 1 + def test_mark_started(t, _time): expected_tm = _time.time.return_value - workerId = "default_0" - self.task.started(workerId) - self.assertEqual(expected_tm, self.task.started_tm) - self.assertEqual(1, self.task.attempt) - self.assertEqual(workerId, self.task.workerId) + worker_name = "default_0" + t.task.mark_started(worker_name) + t.assertEqual(expected_tm, t.task.started_tm) + t.assertEqual(1, t.task.attempt) + t.assertEqual(worker_name, t.task.worker_name) @patch("{src}.time".format(**PATH), autospec=True) - def test_completed_with_retry(self, _time): - self.task.error = Mock() - self.task.attempt = 2 + def test_mark_success(t, _time): + t.task.attempt = 1 expected_tm = _time.time.return_value - error = Mock() + result = Mock() - 
self.task.completed(retry=error) + t.task.mark_success(result) - self.assertTrue(self.task.retryable) - self.assertEqual(expected_tm, self.task.completed_tm) - self.assertEqual(2, self.task.attempt) - self.assertEqual(error, self.task.error) + t.assertFalse(t.task.retryable) + t.assertEqual(expected_tm, t.task.completed_tm) + t.assertEqual(1, t.task.attempt) @patch("{src}.time".format(**PATH), autospec=True) - def test_completed_with_success(self, _time): - self.task.attempt = 1 + def test_mark_failure(t, _time): + t.task.attempt = 1 expected_tm = _time.time.return_value - result = Mock() + error = RuntimeError("boom") + + t.task.mark_failure(error) - self.task.completed(result=result) + t.assertFalse(t.task.retryable) + t.assertEqual(expected_tm, t.task.completed_tm) + t.assertEqual(1, t.task.attempt) - self.assertFalse(self.task.retryable) - self.assertEqual(expected_tm, self.task.completed_tm) - self.assertEqual(1, self.task.attempt) - self.assertIsNone(self.task.error) + # silence 'Unhandled error in Deferred' + t.task.deferred.addErrback(lambda x: None) @patch("{src}.time".format(**PATH), autospec=True) - def test_completed_with_error(self, _time): - self.task.attempt = 1 + def test_mark_retry(t, _time): + t.task.attempt = 1 expected_tm = _time.time.return_value - error = Mock() - self.task.completed(error=error) + t.task.mark_retry() - self.assertFalse(self.task.retryable) - self.assertEqual(expected_tm, self.task.completed_tm) - self.assertEqual(1, self.task.attempt) - self.assertIs(self.task.error, error) + t.assertTrue(t.task.retryable) + t.assertEqual(expected_tm, t.task.completed_tm) + t.assertEqual(1, t.task.attempt) diff --git a/Products/ZenHub/server/executors/workers.py b/Products/ZenHub/server/executors/workers.py index d0d1a6c608..3ce3b5ce5d 100644 --- a/Products/ZenHub/server/executors/workers.py +++ b/Products/ZenHub/server/executors/workers.py @@ -11,34 +11,33 @@ import time +from functools import partial + +import attr + +from attr.validators import instance_of from twisted.internet import defer -from twisted.internet.task import LoopingCall -from twisted.spread import pb, banana, jelly -from zope.component import getUtility +from twisted.internet.task import deferLater, LoopingCall +from twisted.spread import pb from zope.event import notify -from Products.ZenHub.PBDaemon import RemoteException +from Products.ZenHub.errors import RemoteException from ..events import ( ServiceCallReceived, ServiceCallStarted, ServiceCallCompleted, ) -from ..interface import IHubServerConfig from ..priority import ( ModelingPaused, PrioritySelection, ServiceCallPriority, servicecall_priority_map, ) +from ..service import ServiceCall from ..worklist import ZenHubWorklist -from ..utils import UNSPECIFIED as _UNSPECIFIED, getLogger +from ..utils import getLogger -_InternalErrors = ( - pb.ProtocolError, - banana.BananaError, - jelly.InsecureJelly, -) _RemoteErrors = (RemoteException, pb.RemoteError) @@ -46,7 +45,7 @@ class WorkerPoolExecutor(object): """An executor that executes service calls using remote workers.""" @classmethod - def create(cls, name, config=None, pool=None): + def create(cls, name, config, pool): """Return a new executor instance. 
:param str name: The executor's name @@ -66,101 +65,111 @@ def create(cls, name, config=None, pool=None): ServiceCallPriority, exclude=modeling_paused ) worklist = ZenHubWorklist(selection) - return cls(name, worklist, pool) - - def __init__(self, name, worklist, pool): - """Initialize a WorkerPoolExecutor instance.""" - self.__name = name - self.__worklist = worklist - self.__workers = pool - self.__log = getLogger(self) - self.__states = { - "running": _Running(name, worklist, pool, self.__log), - "stopped": _Stopped(), - } - self.__state = self.__states["stopped"] + return cls(name, worklist, pool, max_retries=config.task_max_retries) + + def __init__(self, name, worklist, pool, max_retries=3): + """ + Initialize a WorkerPoolExecutor instance. + + @type name: str + @type worklist: WorkList + @type pool: WorkerPool + """ + self._name = name + self._worklist = worklist + self._pool = pool + self._max_retries = max_retries + self._log = getLogger(self) + self._scheduler = None + self._loop = None + self._loopd = None @property def name(self): """Return the name of this executor.""" - return self.__name + return self._name @property def pool(self): """Return the pool of workers available to this executor.""" - return self.__workers + return self._pool + + @property + def worklist(self): + """Return the worklist of tasks this executor will execute.""" + return self._worklist + + @property + def scheduler(self): + """Return the scheduler that will dispatch the tasks to the workers.""" + return self._scheduler + + @property + def running(self): + """Return True if the executor is running.""" + if self._loop is None: + return False + return self._loop.running def start(self, reactor): """Start the executor.""" - self.__state = self.__states["running"] - self.__state.start(reactor) + self._scheduler = Scheduler( + reactor, self._name, self._worklist, self._pool + ) + self._loop = LoopingCall(self._scheduler) + self._loopd = self._loop.start(0) + self._log.info("started scheduler worklist=%s", self._name) def stop(self): - self.__state.stop() - self.__state = self.__states["stopped"] + if self._loop is None: + return + self._loop.stop() + self._loop = self._loopd = self._scheduler = None def submit(self, call): """Submit a ServiceCall for execution. Returns a deferred that will fire when execution completes. """ - return self.__state.submit(call) + if self._scheduler is None: + return defer.fail(pb.Error("ZenHub not ready.")) + task = ServiceCallTask( + worklist=self._name, call=call, max_retries=self._max_retries + ) + task.mark_received() + self._log.info( + "received task collector=%s service=%s method=%s id=%s", + task.call.monitor, + task.call.service, + task.call.method, + task.call.id.hex, + ) + notify(EventBuilder.received(task)) + self._worklist.push(task.priority, task) + return task.deferred def __repr__(self): - return "<{0.__class__.__name__} '{1}'>".format(self, self.__name) + return "<{0.__class__.__name__} '{1}'>".format(self, self._name) -class _Stopped(object): - """WorkerPoolExecutor in stopped state.""" +class Scheduler(object): + """ + Schedule tasks for execution. 
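# ---------------------------------------------------------------------------
# Reviewer sketch (illustrative only, not part of this patch): the executor
# life cycle from a caller's point of view.  `config` and `pool` stand for the
# IHubServerConfig utility and the WorkerPool instance that ZenHub builds
# elsewhere; they are placeholders here, not new API.
from twisted.internet import defer, reactor

from Products.ZenHub.server.executors.workers import WorkerPoolExecutor
from Products.ZenHub.server.service import ServiceCall


@defer.inlineCallbacks
def run_one_call(config, pool):
    executor = WorkerPoolExecutor.create("default", config, pool)
    executor.start(reactor)  # starts the LoopingCall-driven Scheduler

    call = ServiceCall(
        monitor="localhost",
        service="EventService",
        method="sendEvents",
        args=[],
        kwargs={},
    )
    # submit() queues the call and returns the task's deferred; it fires with
    # the worker's result, or errbacks with a pb.Error / RemoteError.
    result = yield executor.submit(call)
    defer.returnValue(result)
# ---------------------------------------------------------------------------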
+ """ - def stop(self): - pass - - def submit(self, call): - return defer.fail(pb.Error("ZenHub not ready.")) - - -class _Running(object): - """WorkerPoolExecutor in running state.""" - - def __init__(self, name, worklist, pool, log): + def __init__(self, reactor, name, worklist, pool): + self.reactor = reactor self.name = name self.worklist = worklist self.workers = pool - self.log = log - config = getUtility(IHubServerConfig) - self.task_max_retries = config.task_max_retries - self.loop = LoopingCall(self.dispatch) - - def start(self, reactor): - self.reactor = reactor - self.loopd = self.loop.start(0) - - def stop(self): - self.loop.stop() - - def submit(self, call): - task = ServiceCallTask(self.name, call) - notify(task.received()) - self.log.info( - "Received task service=%s method=%s id=%s", - call.service, - call.method, - call.id.hex, - ) - self.worklist.push(task.priority, task) - return task.deferred + self.log = getLogger(self) @defer.inlineCallbacks - def dispatch(self): - """Schedule tasks for execution by workers.""" - self.log.debug( - "There are %s workers currently available to work %s tasks " - "worklist=%s", - self.workers.available, - len(self.worklist), - self.name, - ) + def __call__(self): + """ + Schedule tasks for execution by workers. + """ + task = None worker = None try: # Retrieve a task from the work queue @@ -168,282 +177,304 @@ def dispatch(self): # Retrieve a worker to execute the task. worker = yield self.workers.hire() + self.log.info( + "hired worker for task " + "worker=%s collector=%s service=%s method=%s id=%s", + worker.name, + task.call.monitor, + task.call.service, + task.call.method, + task.call.id, + ) # Schedule the worker to execute the task - self.reactor.callLater(0, self.execute, worker, task) + dispatcher = TaskDispatcher(worker, task) + deferLater(self.reactor, 0, dispatcher).addBoth( + partial(self._task_done, worker, task) + ) except Exception: - self.log.exception("Unexpected failure worklist=%s", self.name) - # Layoff the worker (if a worker was hired) + self.log.exception("unexpected failure worklist=%s", self.name) + if task and task.retryable: + self.worklist.pushfront(task.priority, task) if worker: - self.workers.layoff(worker) + self.workers.ready(worker) + + def _task_done(self, worker, task, *args): + if task.retryable: + self.worklist.pushfront(task.priority, task) + self.log.info( + "enqueued task for retry " + "collector=%s service=%s method=%s id=%s", + task.call.monitor, + task.call.service, + task.call.method, + task.call.id.hex, + ) + self.workers.ready(worker) - @defer.inlineCallbacks - def execute(self, worker, task): - """Execute the task using the worker. - :param worker: The worker to execute the task - :type worker: WorkerRef - :param task: The task to be executed by the worker - :type task: ServiceCallTask +class TaskDispatcher(object): + """ + Execute (dispatch) a task to worker and handle the result. + """ + + def __init__(self, worker, task): + """ + Initialize a TaskDispatcher instance. + + @param worker: The worker to execute the task + @type worker: WorkerRef + @param task: The task to be executed by the worker + @type task: ServiceCallTask + """ + self.worker = worker + self.task = task + self.log = getLogger(self) + + @defer.inlineCallbacks + def __call__(self): + """ + Execute the task using the worker. """ + status = result = None try: - # Notify listeners of a task execution attempt. 
- self._handle_start(task, worker.workerId) - - # Run the task - result = yield worker.run(task.call) - - # Task succeeded, process the result - self._handle_success(task, result) - except _RemoteErrors as ex: - # These kinds of errors originate from the service and - # are propagated directly back to the submitter. - self._handle_failure(task, ex) - except _InternalErrors as ex: - # These are un-retryable errors that occur while attempting - # to execute the call, so are logged and a summary error is - # returned to the submitter. - self.log.error( - "(%s) %s service=%s method=%s id=%s worker=%s", - type(ex), - ex, - task.call.service, - task.call.method, - task.call.id, - worker.workerId, - ) - error = pb.Error( - ("Internal ZenHub error: ({0.__class__.__name__}) {0}") - .format(ex) - .strip() - ) - self._handle_failure(task, error) + # Prepare to execute the task. + self.task.mark_started(self.worker.name) + if self.task.attempt == 1: + _log_initial_attempt(self.task, self.log) + else: + _log_subsequent_attempts(self.task, self.log) + notify(EventBuilder.started(self.task)) + + # Execute the task + result = yield self.worker.run(self.task.call) + + # Mark the task execution as successful + self.task.mark_success(result) + _log_completed("success", self.task, self.log) + status = "result" + except (RemoteException, pb.RemoteError) as ex: + # These are unretryable errors that originate from the service + # and are propagated directly back to the submitter. + self.task.mark_failure(ex) + _log_completed("failed", self.task, self.log) + status, result = "error", ex except pb.PBConnectionLost as ex: # Lost connection to the worker; not a failure. - # The attempt count is _not_ incremented. self.log.warn( - "Worker no longer accepting work worker=%s error=%s", - worker.workerId, - ex, + "worker no longer accepting tasks worker=%s", + self.worker.name, ) - self._handle_retry(task, ex) - except Exception as ex: - self._handle_error(task, ex) - self.log.exception("Unexpected failure worklist=%s", self.name) - finally: - # if the task is retryable, push the task - # to the front of its queue. - if task.retryable: - self.worklist.pushfront(task.priority, task) - self.log.debug( - "Task queued for retry service=%s method=%s id=%s", - task.call.service, - task.call.method, - task.call.id.hex, + if self.task.retryable: + self.task.mark_retry() + _log_retry(self.task, ex, self.log) + status, result = "retry", ex + else: + self.log.warn( + "retries exhausted " + "collector=%s service=%s method=%s id=%s", + self.task.call.monitor, + self.task.call.service, + self.task.call.method, + self.task.call.id.hex, ) - # Make the worker available for work again - self.workers.layoff(worker) - - def _handle_start(self, task, workerId): - task.attempt += 1 - notify(task.started(workerId)) - if task.attempt == 1: - self._log_initial_start(task) - else: - self._log_subsequent_starts(task) - - def _handle_error(self, task, exception): - if task.attempt >= self.task_max_retries: - # No more attempts, handle the error as a failure. - self.log.warn( - "Retries exhausted service=%s method=%s id=%s", - task.call.service, - task.call.method, - task.call.id.hex, - ) - ex = pb.Error( - ("Internal ZenHub error: ({0.__class__.__name__}) {0}") - .format(exception) - .strip() + self.task.mark_failure(ex) + _log_completed("failed", self.task, self.log) + status, result = "error", ex + except Exception as ex: + # 'catch-all' error handler and tasks are not retryable. 
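# ---------------------------------------------------------------------------
# Reviewer sketch (illustrative only, not part of this patch): the duck-typed
# contract the Scheduler above relies on.  The real implementations are
# ZenHubWorklist and WorkerPool; the stand-ins below only document the methods
# and return types the scheduler actually calls.
from twisted.internet import defer


class MinimalWorklist(object):
    """Only push(), pushfront(), and pop() are used by the scheduler."""

    def __init__(self):
        self._items = []

    def push(self, priority, task):
        self._items.append((priority, task))

    def pushfront(self, priority, task):
        self._items.insert(0, (priority, task))

    def pop(self):
        # The real worklist returns a Deferred that waits until work exists;
        # this stand-in only handles the non-empty case.
        return defer.succeed(self._items.pop(0)[1])


class MinimalWorkerPool(object):
    """hire() yields a worker with .name and .run(call); ready() returns it."""

    def __init__(self, workers):
        self._idle = list(workers)

    def hire(self):
        # The real pool waits until a worker is idle; the stand-in assumes
        # one is always available.
        return defer.succeed(self._idle.pop())

    def ready(self, worker):
        self._idle.append(worker)
# ---------------------------------------------------------------------------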
+ self.task.mark_failure(_to_internal_error(ex)) + _log_completed("failed", self.task, self.log) + status, result = "error", ex + self.log.exception( + "unexpected failure " + "worklist=%s collector=%s service=%s method=%s id=%s", + self.task.worklist, + self.task.call.monitor, + self.task.call.service, + self.task.call.method, + self.task.call.id.hex, ) - self._handle_failure(task, ex) - else: - # Still have attempts, handle the error as a retry. - self._handle_retry(task, exception) - - def _handle_retry(self, task, exception): - notify(task.completed(retry=exception)) - self._log_incomplete(task) - - def _handle_success(self, task, result): - # Send the result back to the submitter - task.success(result) - # Notify listeners of call completion (and success) - notify(task.completed(result=result)) - self._log_completed(task) - - def _handle_failure(self, task, exception): - # Send failure back to the submitter - task.failure(exception) - # Notify listeners of call completion (and failure) - notify(task.completed(error=exception)) - self._log_completed(task) - - def _log_initial_start(self, task): - call = task.call - waited = task.started_tm - task.received_tm - self.log.info( - "Begin task service=%s method=%s id=%s worker=%s waited=%0.2f", - call.service, - call.method, - call.id.hex, - task.workerId, - waited, - ) + finally: + notify(EventBuilder.completed(self.task, status, result)) - def _log_subsequent_starts(self, task): - call = task.call - waited = task.started_tm - task.completed_tm - self.log.info( - "Retry task service=%s method=%s id=%s " - "worker=%s attempt=%s waited=%0.2f", - call.service, - call.method, - call.id.hex, - task.workerId, - task.attempt, - waited, - ) - def _log_incomplete(self, task): - call = task.call - elapsed = task.completed_tm - task.started_tm - self.log.info( - "Failed to complete task service=%s method=%s id=%s " - "worker=%s duration=%0.2f error=%s", - call.service, - call.method, - call.id.hex, - task.workerId, - elapsed, - task.error, - ) +def _to_internal_error(exception): + return pb.Error( + ("Internal ZenHub error: ({0.__class__.__name__}) {0}") + .format(exception) + .strip() + ) + - def _log_completed(self, task): - call = task.call - elapsed = task.completed_tm - task.started_tm - lifetime = task.completed_tm - task.received_tm - status = "success" if not task.error else "failed" - self.log.info( - "Completed task service=%s method=%s id=%s " - "worker=%s status=%s duration=%0.2f lifetime=%0.2f", - call.service, - call.method, - call.id.hex, - task.workerId, - status, - elapsed, - lifetime, +class EventBuilder(object): + @staticmethod + def received(task): + """Return a ServiceCallReceived object.""" + data = dict(task.event_data) + data["timestamp"] = task.received_tm + return ServiceCallReceived(**data) + + @staticmethod + def started(task): + """Return a ServiceCallStarted object.""" + data = dict(task.event_data) + data.update({"timestamp": task.started_tm, "attempts": task.attempt}) + return ServiceCallStarted(**data) + + @staticmethod + def completed(task, key, value): + """Return a ServiceCallCompleted object.""" + data = dict(task.event_data) + data.update( + { + "timestamp": task.completed_tm, + "attempts": task.attempt, + key: value, + } ) + return ServiceCallCompleted(**data) +@attr.s(slots=True) class ServiceCallTask(object): """Wraps a ServiceCall to track for use with WorkerPoolExecutor.""" - __slots__ = ( - "call", - "deferred", - "desc", - "attempt", - "priority", - "received_tm", - "started_tm", - "completed_tm", - 
"error", - "retryable", - "workerId", - "event_data", - ) + call = attr.ib(validator=instance_of(ServiceCall)) + worklist = attr.ib(converter=str) + max_retries = attr.ib(converter=int) + + deferred = attr.ib(factory=defer.Deferred) + + attempt = attr.ib(default=0) + received_tm = attr.ib(default=None) + started_tm = attr.ib(default=None) + completed_tm = attr.ib(default=None) + worker_name = attr.ib(default=None) - def __init__(self, name, call): - self.call = call - self.deferred = defer.Deferred() - self.desc = "%s:%s.%s" % (call.monitor, call.service, call.method) - self.attempt = 0 + # These attributes are initialized in __attrs_post_init__. + desc = attr.ib(init=False) + priority = attr.ib(init=False) + event_data = attr.ib(init=False) + + def __attrs_post_init__(self): + self.desc = "%s:%s.%s" % ( + self.call.monitor, + self.call.service, + self.call.method, + ) self.priority = servicecall_priority_map.get( (self.call.service, self.call.method), ) - self.received_tm = None - self.started_tm = None - self.completed_tm = None - self.error = None - self.retryable = True - self.workerId = None - self.event_data = dict(call) + self.event_data = attr.asdict(self.call) self.event_data.update( { - "queue": name, + "queue": self.worklist, "priority": self.priority, } ) - def received(self): - """Return a ServiceCallReceived object.""" + @property + def retryable(self): + """ + Return True if the task can be re-executed. + """ + if self.deferred.called: + return False + return self.attempt <= self.max_retries + + def mark_received(self): + """ + Update the task's state to indicate task acceptance. + """ self.received_tm = time.time() - data = dict(self.event_data) - data["timestamp"] = self.received_tm - return ServiceCallReceived(**data) - def started(self, workerId): - """Return a ServiceCallStarted object.""" + def mark_started(self, worker_name): + """ + Update the task's state to indicate the task's execution. + """ + self.attempt += 1 self.started_tm = time.time() - self.workerId = workerId - self.event_data["worker"] = workerId - data = dict(self.event_data) - data.update( - { - "timestamp": self.started_tm, - "attempts": self.attempt, - } - ) - return ServiceCallStarted(**data) + self.worker_name = worker_name + self.event_data["worker"] = worker_name # needed for completed event - def completed( - self, - result=_UNSPECIFIED, - error=_UNSPECIFIED, - retry=_UNSPECIFIED, - ): - """Return a ServiceCallCompleted object.""" + def mark_success(self, result): + """ + Update the task's state to indicate the task's successful completion. + """ self.completed_tm = time.time() - if result is not _UNSPECIFIED: - key, value = ("result", result) - self.error = None - self.retryable = False - elif error is not _UNSPECIFIED: - key, value = ("error", error) - self.error = error - self.retryable = False - elif retry is not _UNSPECIFIED: - key, value = ("retry", retry) - self.error = retry - self.retryable = True - else: - raise TypeError( - "Require one of 'result', 'error', or 'retry' parameters", - ) - data = dict(self.event_data) - data.update( - { - "timestamp": self.completed_tm, - "attempts": self.attempt, - key: value, - } - ) - return ServiceCallCompleted(**data) + self.deferred.callback(result) - def failure(self, error): + def mark_failure(self, error): + """ + Update the task's state to indicate the task's failed completion. 
+ """ + self.completed_tm = time.time() self.deferred.errback(error) - def success(self, result): - self.deferred.callback(result) + def mark_retry(self): + """ + Update the task's state to indicate the task's incomplete execution. + """ + self.completed_tm = time.time() + + +def _log_retry(task, error, log): + elapsed = task.completed_tm - task.started_tm + log.info( + "failed to complete task collector=%s service=%s method=%s " + "id=%s worker=%s duration=%0.2f error=%s", + task.call.monitor, + task.call.service, + task.call.method, + task.call.id.hex, + task.worker_name, + elapsed, + error, + ) + + +def _log_initial_attempt(task, log): + waited = task.started_tm - task.received_tm + log.info( + "begin task " + "collector=%s service=%s method=%s id=%s worker=%s waited=%0.2f", + task.call.monitor, + task.call.service, + task.call.method, + task.call.id.hex, + task.worker_name, + waited, + ) + + +def _log_subsequent_attempts(task, log): + waited = task.started_tm - task.completed_tm + log.info( + "retry task collector=%s service=%s method=%s id=%s " + "worker=%s attempt=%s waited=%0.2f", + task.call.monitor, + task.call.service, + task.call.method, + task.call.id.hex, + task.worker_name, + task.attempt, + waited, + ) + + +def _log_completed(status, task, log): + elapsed = task.completed_tm - task.started_tm + lifetime = task.completed_tm - task.received_tm + log.info( + "completed task collector=%s service=%s method=%s id=%s " + "worker=%s status=%s duration=%0.2f lifetime=%0.2f", + task.call.monitor, + task.call.service, + task.call.method, + task.call.id.hex, + task.worker_name, + status, + elapsed, + lifetime, + ) diff --git a/Products/ZenHub/server/metrics.py b/Products/ZenHub/server/metrics.py index 6130722ced..761414e3a7 100644 --- a/Products/ZenHub/server/metrics.py +++ b/Products/ZenHub/server/metrics.py @@ -97,7 +97,8 @@ def decrementLegacyMetricCounters(event): global _legacy_worklist_counters for key in (event.priority, "total"): _legacy_worklist_counters[key] -= 1 - # If the count falls below zero, there's a bug and should be logged. + # If the count falls below zero, + # there's a bug and should be logged. if _legacy_worklist_counters[key] < 0: log.warn( "Counter is negative worklist=%s value=%s", diff --git a/Products/ZenHub/server/priority.py b/Products/ZenHub/server/priority.py index 67c8f99b07..9dd47f2328 100644 --- a/Products/ZenHub/server/priority.py +++ b/Products/ZenHub/server/priority.py @@ -10,9 +10,12 @@ from __future__ import absolute_import import collections -import enum from itertools import chain, count, cycle + +import enum +import six + from zope.component import getUtility from Products.Zuul.interfaces import IDataRootFactory @@ -150,7 +153,7 @@ def _build_weighted_list(data): # Generate a series of weights. The first element should have the # highest weight. - weights = [(2**n) - 1 for n in range(len(elements), 0, -1)] + weights = [(2 ** n) - 1 for n in range(len(elements), 0, -1)] # Build a list of element lists where each element list has a length # matching their weight. E.g. 
given elements ('a', 'b') and weights @@ -301,7 +304,7 @@ def __len__(self): return len(self.__map) def __makekey(self, key): - if isinstance(key, basestring): + if isinstance(key, six.string_types): key = str(key).split(":") service, method = key if service not in self.__services: diff --git a/Products/ZenHub/server/service.py b/Products/ZenHub/server/service.py index 2b9dfac327..141e58e30a 100644 --- a/Products/ZenHub/server/service.py +++ b/Products/ZenHub/server/service.py @@ -15,14 +15,17 @@ import time import uuid +import attr + +from attr.validators import instance_of from twisted.internet import defer from twisted.spread import pb from zope.component import getUtility from zope.event import notify -from Products.ZenHub.PBDaemon import RemoteBadMonitor, RemoteException from Products.Zuul.interfaces import IDataRootFactory +from ..errors import RemoteBadMonitor, RemoteException from .events import ServiceAddedEvent from .utils import getLogger, import_service_class @@ -147,46 +150,32 @@ def __call__(self, dmd, monitor, name): raise +@attr.s(slots=True, frozen=True) class ServiceCall(object): """Metadata for calling a method on a service.""" - __slots__ = { - "id": "Unique instance identifier", - "monitor": "Name of performance monitor", - "service": "Name of service class", - "method": "Name of method found in service class", - "args": "Positional arguments to the method", - "kwargs": "Keyword arguments to the method", - } + monitor = attr.ib(converter=str) + """Name of the performance monitor (aka collector)""" - def __init__(self, **kw): - """Initialize a ServiceCall instance. + service = attr.ib(converter=str) + """Name of the ZenHub service class""" - :param str monitor: Name of the performance monitor (collector) - :param str service: Name of the service - :param str method: Name of the method on the service - :param Sequence[Any] args: positional arguments to method - :param Mapping[str, Any] kwargs: keyword arguments to method - """ - self.id = uuid.uuid4() - self.monitor = kw.pop("monitor") - self.service = kw.pop("service") - self.method = kw.pop("method") - self.args = kw.pop("args") - self.kwargs = kw.pop("kwargs") - # Raise an exception if other arguments are given. 
- if kw: - raise AttributeError( - "%s has no attribute%s: %s" - % ( - type(self).__name__, - "" if len(kw) == 1 else "s", - ", ".join(kw.keys()), - ) - ) + method = attr.ib(converter=str) + """Name of the method to call on the ZenHub service class""" - def __iter__(self): - return ((name, getattr(self, name)) for name in ServiceCall.__slots__) + args = attr.ib() + """Positional arguments to the method""" + + kwargs = attr.ib(validator=instance_of(dict)) + """Keyword arguments to the method""" + + id = attr.ib(factory=uuid.uuid4) + """Unique instance identifier""" + + @args.validator + def _check_args(self, attribute, value): + if not isinstance(value, (list, tuple)): + raise TypeError("args must be a list or tuple") class ServiceReferenceFactory(object): @@ -274,11 +263,11 @@ def remoteMessageReceived(self, broker, message, args, kw): self.__name, self.__monitor, ) - state = yield executor.submit(call) - response = broker.serialize(state, self.perspective) + result = yield executor.submit(call) + response = broker.serialize(result, self.perspective) success = True defer.returnValue(response) - except _PropagatingErrors as ex: + except _PropagatingErrors: raise except Exception as ex: self.__log.exception( diff --git a/Products/ZenHub/server/tests/test_avatar.py b/Products/ZenHub/server/tests/test_avatar.py index 4180329e2d..b7b7c4bbc4 100644 --- a/Products/ZenHub/server/tests/test_avatar.py +++ b/Products/ZenHub/server/tests/test_avatar.py @@ -10,7 +10,7 @@ from __future__ import absolute_import from unittest import TestCase -from mock import Mock, patch, create_autospec, sentinel +from mock import create_autospec, MagicMock, patch, sentinel from ..avatar import HubAvatar, RemoteBadMonitor, pb from ..service import ServiceManager @@ -22,27 +22,26 @@ class HubAvatarTest(TestCase): """Test the HubAvatar class.""" - def setUp(self): - self.getLogger_patcher = patch( - "{src}.getLogger".format(**PATH), - autospec=True, + def setUp(t): + t.getLogger_patcher = patch( + "{src}.getLogger".format(**PATH), autospec=True ) - self.getLogger = self.getLogger_patcher.start() - self.addCleanup(self.getLogger_patcher.stop) + t.getLogger = t.getLogger_patcher.start() + t.addCleanup(t.getLogger_patcher.stop) - self.services = create_autospec(ServiceManager) - self.pools = { - "foo": create_autospec(WorkerPool), - "bar": create_autospec(WorkerPool), + t.services = create_autospec(ServiceManager) + t.pools = { + "foo": WorkerPool("foo"), + "bar": WorkerPool("bar"), } - self.avatar = HubAvatar(self.services, self.pools) + t.avatar = HubAvatar(t.services, t.pools) - def test_perspective_ping(self): - ret = self.avatar.perspective_ping() - self.assertEqual(ret, "pong") + def test_perspective_ping(t): + ret = t.avatar.perspective_ping() + t.assertEqual(ret, "pong") - @patch("{src}.os.environ".format(**PATH), name="os.environ", autospec=True) - def test_perspective_getHubInstanceId_normal(self, os_environ): + @patch("{src}.os".format(**PATH), name="os", autospec=True) + def test_perspective_getHubInstanceId_normal(t, _os): key = "CONTROLPLANE_INSTANCE_ID" hubId = "hub" @@ -51,97 +50,108 @@ def side_effect(k, d): return hubId return d - os_environ.get.side_effect = side_effect + _os.environ.get.side_effect = side_effect - actual = self.avatar.perspective_getHubInstanceId() + actual = t.avatar.perspective_getHubInstanceId() - self.assertEqual(actual, hubId) + t.assertEqual(actual, hubId) - @patch("{src}.os.environ".format(**PATH), name="os.environ", autospec=True) - def 
test_perspective_getHubInstanceId_unknown(self, os_environ): - os_environ.get.side_effect = lambda k, d: d - actual = self.avatar.perspective_getHubInstanceId() - self.assertEqual(actual, "Unknown") + @patch("{src}.os".format(**PATH), name="os", autospec=True) + def test_perspective_getHubInstanceId_unknown(t, _os): + _os.environ.get.side_effect = lambda k, d: d + actual = t.avatar.perspective_getHubInstanceId() + t.assertEqual(actual, "Unknown") - def test_perspective_getService_no_listener(self): + def test_perspective_getService_no_listener(t): service_name = "testservice" monitor = "localhost" - expected = self.services.getService.return_value - actual = self.avatar.perspective_getService(service_name, monitor) + expected = t.services.getService.return_value + actual = t.avatar.perspective_getService(service_name, monitor) - self.services.getService.assert_called_with(service_name, monitor) + t.services.getService.assert_called_with(service_name, monitor) expected.addListener.assert_not_called() - self.assertEqual(expected, actual) + t.assertEqual(expected, actual) - def test_perspective_getService_with_listener(self): + def test_perspective_getService_with_listener(t): service_name = "testservice" monitor = "localhost" listener = sentinel.listener options = sentinel.options - expected = self.services.getService.return_value - actual = self.avatar.perspective_getService( + expected = t.services.getService.return_value + actual = t.avatar.perspective_getService( service_name, monitor, listener=listener, options=options, ) - self.services.getService.assert_called_with(service_name, monitor) + t.services.getService.assert_called_with(service_name, monitor) expected.addListener.assert_called_once_with(listener, options) - self.assertEqual(expected, actual) + t.assertEqual(expected, actual) - def test_perspective_getService_raises_RemoteBadMonitor(self): - self.services.getService.side_effect = RemoteBadMonitor("tb", "msg") - with self.assertRaises(RemoteBadMonitor): - self.avatar.perspective_getService("service_name") + def test_perspective_getService_raises_RemoteBadMonitor(t): + t.services.getService.side_effect = RemoteBadMonitor("tb", "msg") + with t.assertRaises(RemoteBadMonitor): + t.avatar.perspective_getService("service_name") @patch("{src}.getLogger".format(**PATH)) - def test_perspective_getService_raises_error(self, getLogger): + def test_perspective_getService_raises_error(t, getLogger): logger = getLogger.return_value - self.avatar._HubAvatar__log = logger + t.avatar._HubAvatar__log = logger service_name = "service_name" - self.services.getService.side_effect = Exception() + t.services.getService.side_effect = Exception() - with self.assertRaises(pb.Error): - self.avatar.perspective_getService(service_name) + with t.assertRaises(pb.Error): + t.avatar.perspective_getService(service_name) logger.exception.assert_called_once_with( "Failed to get service '%s'", service_name, ) - def test_perspective_reportingForWork_nominal(self): - worker = Mock( - spec_set=[ - "workerId", - "sessionId", - "queue_name", - "notifyOnDisconnect", - ] - ) - workerId = "default-1" + def test_perspective_reportForWork_nominal(t): + remote = MagicMock(pb.RemoteReference, autospec=True) + pool_name = "foo" + name = "default_0" disconnect_callback = [] def _notifyOnDisconnect(callback): disconnect_callback.append(callback) - worker.notifyOnDisconnect.side_effect = _notifyOnDisconnect + remote.notifyOnDisconnect.side_effect = _notifyOnDisconnect # Add the worker - 
self.avatar.perspective_reportingForWork(worker, workerId, "foo") - self.assertTrue(hasattr(worker, "sessionId")) - self.assertIsNotNone(worker.sessionId) - self.assertTrue(hasattr(worker, "workerId")) - self.assertEqual(worker.workerId, workerId) - self.pools["foo"].add.assert_called_once_with(worker) + t.avatar.perspective_reportForWork(remote, name, pool_name) + + foo = t.pools[pool_name] + worker = foo.get(name) + t.assertIsNotNone(worker) + t.assertEqual(worker.remote, remote) # Remove the worker - self.assertEqual( + t.assertEqual( 1, len(disconnect_callback), "notifyOnDisconnect not called", ) - disconnect_callback[0](worker) - self.pools["foo"].remove.assert_called_once_with(worker) + disconnect_callback[0](remote) + worker = foo.get(name) + t.assertIsNone(worker) + + def test_perspective_resignFromWork_nominal(t): + remote = MagicMock(pb.RemoteReference, autospec=True) + name = "default-1" + worklist = "foo" + pool = t.pools[worklist] + + # Add the worker + t.avatar.perspective_reportForWork(remote, name, worklist) + worker = pool.get(name) + t.assertIsNotNone(worker) + + # Resign the worker + t.avatar.perspective_resignFromWork(name, worklist) + worker = pool.get(name) + t.assertIsNone(worker) diff --git a/Products/ZenHub/server/tests/test_events.py b/Products/ZenHub/server/tests/test_events.py index 2815073bd9..318ad5d169 100644 --- a/Products/ZenHub/server/tests/test_events.py +++ b/Products/ZenHub/server/tests/test_events.py @@ -9,174 +9,135 @@ from __future__ import absolute_import +import time +import uuid + from unittest import TestCase from ..events import ( - ServiceCallEvent, ServiceCallReceived, ServiceCallStarted, ServiceCallCompleted, ) +from ..priority import ServiceCallPriority from ..utils import subTest -class ServiceCallEventTest(TestCase): - """Test the ServiceCallEvent class.""" - - def test_attribute_names(self): - event = ServiceCallEvent() - expected = [] - names = sorted(n for n in dir(event) if not n.startswith("_")) - self.assertSequenceEqual(expected, names) - - def test_for_slots(self): - self.assertTrue(hasattr(ServiceCallEvent, "__slots__")) - - class ServiceCallReceivedTest(TestCase): """Test the ServiceCallReceived class.""" - def test_for_slots(self): - self.assertTrue(hasattr(ServiceCallReceived, "__slots__")) - - def test_default_values(self): - event = ServiceCallReceived() - self.assertTrue( - all( - getattr(event, name) is None - for name in ServiceCallReceived.__slots__ - ) + def test_nominal(self): + evid = uuid.uuid4() + now = time.time() + + _ = ServiceCallReceived( + id=evid, + monitor="localhost", + service="PingPerf", + method="getPingStuff", + args=[], + kwargs={}, + timestamp=now, + queue="default", + priority=ServiceCallPriority.OTHER, ) - def test_expected_init_args(self): - names = ( - "id", - "monitor", - "service", - "method", - "args", - "kwargs", - "timestamp", - "queue", - "priority", - ) - args = {k: None for k in names} - ServiceCallReceived(**args) - def test_no_extra_init_args(self): args = ("worker", "attempts", "error", "retry", "result", "foo") for arg in args: with subTest(arg=arg): - with self.assertRaises(AssertionError): + with self.assertRaises(TypeError): ServiceCallReceived(**{arg: None}) class ServiceCallStartedTest(TestCase): """Test the ServiceCallStarted class.""" - def test_for_slots(self): - self.assertTrue(hasattr(ServiceCallStarted, "__slots__")) - - def test_default_values(self): - event = ServiceCallStarted(attempts=1) - self.assertTrue( - all( - getattr(event, name) is None - for name in 
ServiceCallEvent.__slots__ - if name != "attempts" - ) + def test_nominal(self): + evid = uuid.uuid4() + now = time.time() + + _ = ServiceCallStarted( + id=evid, + monitor="localhost", + service="PingPerf", + method="getPingStuff", + args=[], + kwargs={}, + timestamp=now, + queue="default", + priority=ServiceCallPriority.OTHER, + worker="default_0", + attempts=1, ) - self.assertEqual(1, event.attempts) - def test_expected_init_args(self): - names = ( - "id", - "monitor", - "service", - "method", - "args", - "kwargs", - "timestamp", - "queue", - "priority", - "worker", - ) - args = {k: None for k in names} - args["attempts"] = 1 - ServiceCallStarted(**args) def test_no_extra_init_args(self): tests = ("error", "retry", "result", "foo") for test in tests: with subTest(test=test): args = {"attempts": 1, test: None} - with self.assertRaises(AssertionError): + with self.assertRaises(TypeError): ServiceCallStarted(**args) class ServiceCallCompletedTest(TestCase): """Test the ServiceCallCompleted class.""" - def test_for_slots(self): - self.assertTrue(hasattr(ServiceCallCompleted, "__slots__")) + def setUp(self): + self.evid = uuid.uuid4() + self.now = time.time() + self.base = { + "id": self.evid, + "monitor": "localhost", + "service": "PingPerf", + "method": "getPingStuff", + "args": [], + "kwargs": {}, + "timestamp": self.now, + "queue": "default", + "priority": ServiceCallPriority.OTHER, + "worker": "default_0", + "attempts": 1, + } def test_mismatched_arguments(self): exc = RuntimeError() result = 10 tests = ( - {"attempts": 0}, - {"attempts": 0, "error": exc}, - {"attempts": 0, "retry": exc}, - {"attempts": 0, "result": result}, - {"attempts": 0, "error": exc, "retry": exc}, - {"attempts": 1}, - {"attempts": 1, "result": result, "error": exc, "retry": exc}, - {"attempts": 1, "error": exc, "result": result}, + {}, + {"error": exc, "retry": exc}, + {"result": result, "error": exc, "retry": exc}, + {"error": exc, "result": result}, ) - for args in tests: + for sample in tests: + args = dict(self.base) + args.update(**sample) with subTest(args=args): - with self.assertRaises(AssertionError): + with self.assertRaises(TypeError): ServiceCallCompleted(**args) def test_nominal_init_failure(self): exc = RuntimeError() - event = ServiceCallCompleted(attempts=1, error=exc) - self.assertTrue( - all( - getattr(event, name) is None - for name in ServiceCallEvent.__slots__ - if name not in ("attempts", "error") - ) - ) + self.base["error"] = exc + event = ServiceCallCompleted(**self.base) self.assertEqual(1, event.attempts) self.assertIs(exc, event.error) def test_nominal_init_retry(self): exc = RuntimeError() - event = ServiceCallCompleted(attempts=1, retry=exc) - self.assertTrue( - all( - getattr(event, name) is None - for name in ServiceCallEvent.__slots__ - if name not in ("attempts", "retry") - ) - ) + self.base["retry"] = exc + event = ServiceCallCompleted(**self.base) self.assertEqual(1, event.attempts) self.assertIs(exc, event.retry) def test_nominal_init_success(self): result = 10 - event = ServiceCallCompleted(attempts=1, result=result) - self.assertTrue( - all( - getattr(event, name) is None - for name in ServiceCallEvent.__slots__ - if name not in ("attempts", "result") - ) - ) + self.base["result"] = result + event = ServiceCallCompleted(**self.base) self.assertEqual(1, event.attempts) self.assertIs(result, event.result) def test_no_extra_init_args(self): - with self.assertRaises(AssertionError): + with self.assertRaises(TypeError): ServiceCallCompleted(attempts=1, foo=10) diff --git 
a/Products/ZenHub/server/tests/test_service.py b/Products/ZenHub/server/tests/test_service.py index 861d5a44b9..86070fcc80 100644 --- a/Products/ZenHub/server/tests/test_service.py +++ b/Products/ZenHub/server/tests/test_service.py @@ -10,10 +10,15 @@ from __future__ import absolute_import from unittest import TestCase + +import attr + from mock import Mock, patch, call, MagicMock, sentinel from zope.interface.verify import verifyObject -from Products.ZenHub.PBDaemon import RemoteException +from Products.ZenHub.errors import RemoteException + +from ..events import IServiceAddedEvent from ..exceptions import UnknownServiceError from ..service import ( ServiceManager, @@ -28,7 +33,6 @@ pb, defer, ) -from ..events import IServiceAddedEvent PATH = {"src": "Products.ZenHub.server.service"} @@ -40,7 +44,7 @@ def setUp(self): self.monitor = "monitor" self.service = "name" self.method = "method" - self.args = () + self.args = [] self.kwargs = {} self.call = ServiceCall( monitor=self.monitor, @@ -60,7 +64,7 @@ def test_nominal_initialization(self): self.assertIsNotNone(self.call.id) def test_extra_arg_initialization(self): - with self.assertRaises(AttributeError): + with self.assertRaises(TypeError): ServiceCall( monitor="monitor", service="name", @@ -78,7 +82,7 @@ def test_dict_conversion(self): "args": self.args, "kwargs": self.kwargs, } - dmap = dict(self.call) + dmap = attr.asdict(self.call) _id = dmap.pop("id", None) self.assertIsNotNone(_id) self.assertDictEqual(expected, dmap) @@ -360,7 +364,9 @@ def test_remoteMessageReceived_raise_external_error(self): RemoteException("boom", "tb"), ] for expected_ex in exceptions: - executor.submit.side_effect = lambda j: defer.fail(expected_ex) + executor.submit.side_effect = lambda j, ex=expected_ex: defer.fail( + ex + ) cb = Mock() dfr = self.reference.remoteMessageReceived( self.broker, diff --git a/Products/ZenHub/server/tests/test_worker.py b/Products/ZenHub/server/tests/test_worker.py new file mode 100644 index 0000000000..00c7ddb356 --- /dev/null +++ b/Products/ZenHub/server/tests/test_worker.py @@ -0,0 +1,160 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +from unittest import TestCase + +from mock import MagicMock, patch +from twisted.internet import defer +from twisted.python.failure import Failure + +from ..service import ServiceCall +from ..worker import pb, ServiceRegistry, Worker + +PATH = {"src": "Products.ZenHub.server.worker"} + + +class WorkerTest(TestCase): + def setUp(t): + t.getLogger_patcher = patch( + "{src}.getLogger".format(**PATH), + autospec=True, + ) + t.getLogger = t.getLogger_patcher.start() + t.addCleanup(t.getLogger_patcher.stop) + + t.name = "default_0" + t.remote = MagicMock(pb.RemoteReference, autospec=True) + t.worker = Worker(name=t.name, remote=t.remote) + + def test_properties(t): + t.assertEqual(t.worker.remote, t.remote) + t.assertEqual(t.worker.name, t.name) + t.assertIsInstance(t.worker.services, ServiceRegistry) + + def test_uncached_service_reference(t): + service = MagicMock(pb.RemoteReference, autospect=True) + t.remote.callRemote.return_value = defer.succeed(service) + call = ServiceCall( + monitor="localhost", + service="service", + method="method", + args=[], + kwargs={}, + ) + expected_result = service.callRemote.return_value + + dfr = t.worker.run(call) + + t.assertIsInstance(dfr, defer.Deferred) + t.assertTrue(dfr.called) + t.assertEqual(dfr.result, expected_result) + t.remote.callRemote.assert_called_once_with( + "getService", call.service, call.monitor + ) + service.callRemote.assert_called_once_with(call.method) + + cached_service = t.worker.services.get("localhost", "service") + t.assertEqual(cached_service, service) + + def test_cached_service_reference(t): + service_ref = MagicMock(pb.RemoteReference, autospect=True) + monitor = "localhost" + service_name = "service" + t.worker.services.add(monitor, service_name, service_ref) + call = ServiceCall( + monitor=monitor, + service=service_name, + method="method", + args=[], + kwargs={}, + ) + expected_result = service_ref.callRemote.return_value + + dfr = t.worker.run(call) + + t.assertIsInstance(dfr, defer.Deferred) + t.assertTrue(dfr.called) + t.assertEqual(dfr.result, expected_result) + t.remote.callRemote.assert_not_called() + service_ref.callRemote.assert_called_once_with(call.method) + + def test_run_method_with_args(t): + service_ref = MagicMock(pb.RemoteReference, autospect=True) + monitor = "localhost" + service_name = "service" + t.worker.services.add(monitor, service_name, service_ref) + call = ServiceCall( + monitor=monitor, + service=service_name, + method="method", + args=["arg"], + kwargs={"arg": 1}, + ) + expected_result = service_ref.callRemote.return_value + + dfr = t.worker.run(call) + + t.assertIsInstance(dfr, defer.Deferred) + t.assertTrue(dfr.called) + t.assertEqual(dfr.result, expected_result) + service_ref.callRemote.assert_called_once_with( + call.method, + call.args[0], + arg=call.kwargs["arg"], + ) + + def test_bad_service(t): + expected_error = ValueError("boom") + t.remote.callRemote.side_effect = expected_error + call = ServiceCall( + monitor="localhost", + service="the_service", + method="method", + args=[], + kwargs={}, + ) + + dfr = t.worker.run(call) + + t.assertIsInstance(dfr.result, Failure) + + actual_error = dfr.result.value + t.assertIsInstance(actual_error, ValueError) + t.assertIs(actual_error, expected_error) + + # add an errback to silence the unhandled deferred error message + dfr.addErrback(lambda x: None) + + def test_remote_method_failure(t): + service_ref = 
MagicMock(pb.RemoteReference, autospect=True) + monitor = "localhost" + service_name = "service" + expected_error = ValueError("boom") + service_ref.callRemote.side_effect = expected_error + t.worker.services.add(monitor, service_name, service_ref) + call = ServiceCall( + monitor="localhost", + service="service", + method="method", + args=[], + kwargs={}, + ) + + dfr = t.worker.run(call) + + service_ref.callRemote.assert_called_once_with(call.method) + t.assertIsInstance(dfr.result, Failure) + actual_error = dfr.result.value + t.assertIsInstance(actual_error, ValueError) + t.assertIs(actual_error, expected_error) + + # add an errback to silence the unhandled deferred error message + dfr.addErrback(lambda x: None) diff --git a/Products/ZenHub/server/tests/test_workerpool.py b/Products/ZenHub/server/tests/test_workerpool.py index ddc353f0d5..7d8741d6e0 100644 --- a/Products/ZenHub/server/tests/test_workerpool.py +++ b/Products/ZenHub/server/tests/test_workerpool.py @@ -10,16 +10,15 @@ from __future__ import absolute_import from unittest import TestCase -from mock import Mock, patch +from mock import MagicMock, Mock, patch from twisted.internet import defer from twisted.python.failure import Failure from Products.ZenHub.server.service import ServiceCall +from ..worker import Worker from ..workerpool import ( - RemoteServiceRegistry, WorkerPool, - WorkerRef, WorkerAvailabilityQueue, ) @@ -27,402 +26,227 @@ class WorkerPoolTest(TestCase): # noqa: D101 - def setUp(self): - self.queue = "default" - self.pool = WorkerPool(self.queue) + def setUp(t): + t.queue = "default" + t.pool = WorkerPool(t.queue) + t.worker1 = MagicMock(Worker, autospec=True) + t.worker1.name = "default_1" + t.worker2 = MagicMock(Worker, autospec=True) + t.worker2.name = "default_2" - def test_name_property(self): - self.assertEqual(self.queue, self.pool.name) + def test_name_property(t): + t.assertEqual(t.queue, t.pool.name) - def test_add_workers(self): - worker1 = Mock(workerId=1, sessionId="1") - worker2 = Mock(workerId=2, sessionId="2") + def test_add_workers_not_ready(t): + t.pool.add(t.worker1) + t.assertIn(t.worker1, t.pool) - self.pool.add(worker1) - self.assertIn(worker1, self.pool) + t.pool.add(t.worker2) + t.assertIn(t.worker2, t.pool) - self.pool.add(worker2) - self.assertIn(worker2, self.pool) + t.assertEqual(2, len(t.pool)) + t.assertEqual(0, t.pool.available) - def test_add_WorkerRef(self): - worker = Mock(workerId=1, sessionId="1") - services = Mock() - workerref = WorkerRef(worker, services) + def test_add_worker_twice_not_ready(t): + t.pool.add(t.worker1) + t.assertIn(t.worker1, t.pool) - with self.assertRaises(AssertionError): - self.pool.add(workerref) + t.pool.add(t.worker1) + t.assertIn(t.worker1, t.pool) - def test_add_worker_twice(self): - worker = Mock(workerId=1, sessionId="1") + t.assertEqual(len(t.pool), 1) + t.assertEqual(0, t.pool.available) - self.pool.add(worker) - self.assertIn(worker, self.pool) + def test_ready(t): + t.pool.add(t.worker1) + t.pool.add(t.worker2) - self.pool.add(worker) - self.assertIn(worker, self.pool) - self.assertEqual(len(self.pool), 1) + t.pool.ready(t.worker1) + t.assertEqual(1, t.pool.available) + t.assertIn(t.worker1, t.pool) - def test_add_duplicate_worker(self): - worker = Mock(workerId=1, sessionId="1") - worker_dup = Mock(workerId=1, sessionId="2") + t.pool.ready(t.worker2) + t.assertEqual(2, t.pool.available) + t.assertIn(t.worker2, t.pool) - self.pool.add(worker) - self.assertIn(worker, self.pool) + def test_ready_after_replace(t): + worker_replacement = 
MagicMock(Worker, autospec=True) + worker_replacement.name = t.worker1.name - self.pool.add(worker_dup) - self.assertIn(worker_dup, self.pool) - self.assertIn(worker, self.pool) + t.pool.add(t.worker1) + t.pool.ready(t.worker1) + t.assertIn(t.worker1, t.pool) + t.assertEqual(1, t.pool.available) + t.assertNotIn(worker_replacement, t.pool) - def test_remove_worker(self): - worker1 = Mock(workerId=1, sessionId="1") - worker2 = Mock(workerId=2, sessionId="2") - self.pool.add(worker1) - self.pool.add(worker2) - self.assertEqual(len(self.pool), 2) + t.pool.add(worker_replacement) + t.assertEqual(len(t.pool), 1) + t.assertIn(worker_replacement, t.pool) + t.assertNotIn(t.worker1, t.pool) + t.assertEqual(0, t.pool.available) - self.pool.remove(worker1) - self.assertNotIn(worker1, self.pool) - self.assertIn(worker2, self.pool) - self.assertEqual(len(self.pool), 1) + t.pool.ready(t.worker1) + t.assertEqual(1, t.pool.available) - self.pool.remove(worker2) - self.assertNotIn(worker1, self.pool) - self.assertNotIn(worker2, self.pool) - self.assertEqual(len(self.pool), 0) + def test_remove_without_ready(t): + t.pool.add(t.worker1) + t.pool.add(t.worker2) + t.assertEqual(len(t.pool), 2) - def test_remove_WorkerRef(self): - worker = Mock(workerId=1, sessionId="1") - services = Mock() - workerref = WorkerRef(worker, services) + t.pool.remove(t.worker1) + t.assertNotIn(t.worker1, t.pool) + t.assertIn(t.worker2, t.pool) + t.assertEqual(len(t.pool), 1) - with self.assertRaises(AssertionError): - self.pool.add(workerref) + t.pool.remove(t.worker2) + t.assertNotIn(t.worker1, t.pool) + t.assertNotIn(t.worker2, t.pool) + t.assertEqual(len(t.pool), 0) - def test_available(self): - worker = Mock(workerId=1, sessionId="1") - self.assertEqual(self.pool.available, 0) + def test_available(t): + t.assertEqual(t.pool.available, 0) - self.pool.add(worker) - self.assertEqual(self.pool.available, 1) + t.pool.add(t.worker1) + t.assertEqual(t.pool.available, 0) - self.pool.remove(worker) - self.assertEqual(self.pool.available, 0) + t.pool.ready(t.worker1) + t.assertEqual(t.pool.available, 1) - def test_iter_protocol(self): - w = next(iter(self.pool), None) - self.assertIsNone(w) + t.pool.remove(t.worker1) + t.assertEqual(t.pool.available, 0) - worker = Mock(workerId=1, sessionId="1") - self.pool.add(worker) - w = next(iter(self.pool), None) - self.assertIsNotNone(w) - self.assertIsInstance(w, WorkerRef) - self.assertIs(w.ref, worker) + def test_iter_protocol(t): + w = next(iter(t.pool), None) + t.assertIsNone(w) - def test_hire(self): - worker = Mock(workerId=1, sessionId="1") - worker.callRemote.return_value = defer.succeed("pong") + t.pool.add(t.worker1) + w = next(iter(t.pool), None) + t.assertIsNotNone(w) + t.assertIsInstance(w, Worker) + t.assertIs(w, t.worker1) - self.pool.add(worker) + def test_hire(t): + t.worker1.remote.callRemote.return_value = defer.succeed("pong") - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) + t.pool.add(t.worker1) + t.pool.ready(t.worker1) def assign(x, v): x.value = v - dfr = self.pool.hire() + dfr = t.pool.hire() hired_worker = dfr.result - self.assertIsInstance(hired_worker, WorkerRef) - self.assertIs(hired_worker.ref, worker) - self.assertEqual(self.pool.available, 0) - self.assertEqual(len(self.pool), 1) + t.assertIsInstance(hired_worker, Worker) + t.assertIs(hired_worker, t.worker1) + t.assertEqual(t.pool.available, 0) + t.assertEqual(len(t.pool), 1) - def test_remove_after_hire(self): - worker = Mock(workerId=1, sessionId="1") - self.pool.add(worker) + 
def test_remove_after_hire(t): + t.pool.add(t.worker1) + t.pool.ready(t.worker1) - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) + t.assertEqual(t.pool.available, 1) + t.assertEqual(len(t.pool), 1) - dfr = self.pool.hire() + dfr = t.pool.hire() hired_worker = dfr.result - self.pool.remove(hired_worker.ref) + t.pool.remove(hired_worker) - self.assertEqual(self.pool.available, 0) - self.assertEqual(len(self.pool), 0) + t.assertEqual(t.pool.available, 0) + t.assertEqual(len(t.pool), 0) - def test_hire_no_workers(self): - self.assertEqual(self.pool.available, 0) - self.assertEqual(len(self.pool), 0) + def test_hire_no_workers(t): + t.assertEqual(t.pool.available, 0) + t.assertEqual(len(t.pool), 0) - dfr = self.pool.hire() + dfr = t.pool.hire() # The deferred returned from the pool has not been called - self.assertFalse(dfr.called) - - def test_wait_for_available_worker(self): - self.assertEqual(self.pool.available, 0) - self.assertEqual(len(self.pool), 0) + t.assertFalse(dfr.called) - dfr = self.pool.hire() + def test_wait_for_available_worker(t): + dfr = t.pool.hire() # The deferred returned from the pool has not been called - self.assertFalse(dfr.called) + t.assertFalse(dfr.called) # a worker becomes available - worker = Mock(workerId=1, sessionId="1") - worker.callRemote.return_value = defer.succeed("pong") - self.pool.add(worker) - - # the deferred is called, and the worker_reference_object is its result - self.assertTrue(dfr.called) - worker_ref = dfr.result - self.assertIsInstance(worker_ref, WorkerRef) + t.worker1.remote.callRemote.return_value = defer.succeed("pong") + t.pool.add(t.worker1) + + # Still not called because 'ready' hasn't been called. + t.assertFalse(dfr.called) + + t.pool.ready(t.worker1) + + # the deferred is called, and worker_ref is its result + t.assertTrue(dfr.called) + worker = dfr.result + t.assertIsInstance(worker, Worker) # the reference object contains the worker - self.assertIs(worker_ref.ref, worker) + t.assertIs(worker, t.worker1) - def test_hire_no_available_workers(self): + def test_hire_no_available_workers(t): with patch.object( WorkerPool, "available", return_value=0 ) as available: - pool = WorkerPool(self.queue) - worker = Mock(workerId=1, sessionId="1") - pool.add(worker) + pool = WorkerPool(t.queue) + pool.add(t.worker1) available.__len__.return_value = 0 - self.assertEqual(len(pool), 1) + t.assertEqual(len(pool), 1) - dfr = self.pool.hire() - self.assertFalse(dfr.called) + dfr = t.pool.hire() + t.assertFalse(dfr.called) available.pop.assert_not_called() - def test_layoff(self): - worker = Mock(workerId=1, sessionId="1") - self.pool.add(worker) + def test_ready_after_hire(t): + t.pool.add(t.worker1) + t.pool.ready(t.worker1) + + dfr = t.pool.hire() + hired_worker = dfr.result + + t.assertEqual(0, t.pool.available) + t.assertEqual(1, len(t.pool)) - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) + t.pool.ready(hired_worker) - dfr = self.pool.hire() + t.assertEqual(t.pool.available, 1) + t.assertEqual(len(t.pool), 1) + + def test_ready_retired_worker(t): + t.pool.add(t.worker1) + t.pool.ready(t.worker1) + + dfr = t.pool.hire() hired_worker = dfr.result - self.assertEqual(self.pool.available, 0) - self.assertEqual(len(self.pool), 1) + t.assertEqual(t.pool.available, 0) + t.assertEqual(len(t.pool), 1) - self.pool.layoff(hired_worker) + t.pool.remove(t.worker1) - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) + worker2 = MagicMock(Worker, autospec=True) + 
worker2.name = t.worker1.name - def test_layoff_retired_worker(self): - worker = Mock(workerId=1, sessionId="1") - self.pool.add(worker) + t.pool.add(worker2) + t.pool.ready(worker2) + t.assertEqual(t.pool.available, 1) + t.assertEqual(len(t.pool), 1) - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) + t.pool.ready(hired_worker) + t.assertEqual(t.pool.available, 1) + t.assertEqual(len(t.pool), 1) - dfr = self.pool.hire() - hired_worker = dfr.result + def test_handleReportStatus(t): + t.pool.add(t.worker1) + t.pool.add(t.worker2) + + t.pool.handleReportStatus(event=None) - self.assertEqual(self.pool.available, 0) - self.assertEqual(len(self.pool), 1) - - worker2 = Mock(workerId=1, sessionId="2") - self.pool.remove(worker) - self.pool.add(worker2) - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) - - self.pool.layoff(hired_worker) - self.assertEqual(self.pool.available, 1) - self.assertEqual(len(self.pool), 1) - - def test_handleReportStatus(self): - worker_1 = Mock(name="worker_1") - worker_2 = Mock(name="worker_2") - self.pool.add(worker_1) - self.pool.add(worker_2) - - self.pool.handleReportStatus(event=None) - - worker_1.callRemote.assert_called_with("reportStatus") - worker_2.callRemote.assert_called_with("reportStatus") - - -class RemoteServiceRegistryTest(TestCase): # noqa: D101 - def setUp(self): - self.worker = Mock(workerId=1, sessionId="1") - self.registry = RemoteServiceRegistry(self.worker) - - def test_api(self): - name = "service" - monitor = "monitor" - service = Mock() - self.worker.callRemote.return_value = service - - svc = self.registry.get((name, monitor)) - self.assertIsNone(svc) - self.assertNotIn((name, monitor), self.registry) - - dfr = self.registry.lookup(name, monitor) - - self.assertIsInstance(dfr, defer.Deferred) - self.assertTrue(dfr.called) - self.assertIs(dfr.result, service) - - svc = self.registry.get((name, monitor)) - self.assertIs(svc, service) - self.assertIn((name, monitor), self.registry) - - # Note: 'callRemote' is called only once per (service, method). 
- self.worker.callRemote.assert_called_once_with( - "getService", - name, - monitor, - ) - - -class WorkerRefTest(TestCase): # noqa: D101 - def setUp(self): - self.getLogger_patcher = patch( - "{src}.getLogger".format(**PATH), - autospec=True, - ) - self.getLogger = self.getLogger_patcher.start() - self.addCleanup(self.getLogger_patcher.stop) - self.logger = self.getLogger.return_value - self.worker = Mock(workerId=1, sessionId="1") - self.services = Mock(spec=RemoteServiceRegistry) - self.ref = WorkerRef(self.worker, self.services) - - def test_properties(self): - self.assertEqual(self.ref.ref, self.worker) - self.assertEqual(self.ref.services, self.services) - - def test___getattr__(self): - self.assertEqual(self.ref.workerId, self.worker.workerId) - - def test_run_no_arg_method(self): - service = Mock(spec=["callRemote"]) - self.services.lookup.return_value = service - call = ServiceCall( - monitor="localhost", - service="service", - method="method", - args=[], - kwargs={}, - ) - expected_result = service.callRemote.return_value - - dfr = self.ref.run(call) - - self.assertIsInstance(dfr, defer.Deferred) - self.assertTrue(dfr.called) - self.assertEqual(dfr.result, expected_result) - self.services.lookup.assert_called_once_with( - call.service, - call.monitor, - ) - service.callRemote.assert_called_once_with(call.method) - - def test_run_method_with_args(self): - service = Mock(spec=["callRemote"]) - self.services.lookup.return_value = service - call = ServiceCall( - monitor="localhost", - service="service", - method="method", - args=["arg"], - kwargs={"arg": 1}, - ) - expected_result = service.callRemote.return_value - - dfr = self.ref.run(call) - - self.assertIsInstance(dfr, defer.Deferred) - self.assertTrue(dfr.called) - self.assertEqual(dfr.result, expected_result) - self.services.lookup.assert_called_once_with( - call.service, - call.monitor, - ) - service.callRemote.assert_called_once_with( - call.method, - call.args[0], - arg=call.kwargs["arg"], - ) - - def test_run_lookup_failure(self): - expected_error = ValueError("boom") - self.services.lookup.side_effect = expected_error - call = ServiceCall( - monitor="localhost", - service="the_service", - method="method", - args=[], - kwargs={}, - ) - - result = [] - dfr = self.ref.run(call) - dfr.addErrback(lambda x: result.append(x)) - - self.services.lookup.assert_called_once_with( - call.service, - call.monitor, - ) - self.assertEqual(len(result), 1) - failure = result[0] - self.assertIsInstance(failure, Failure) - actual_error = failure.value - self.assertIsInstance(actual_error, ValueError) - self.logger.error.assert_called_once_with( - "Failed to retrieve remote service " - "service=%s worker=%s error=(%s) %s", - call.service, - 1, - "ValueError", - expected_error, - ) - - def test_run_callremote_failure(self): - service = Mock(spec=["callRemote"]) - self.services.lookup.return_value = service - call = ServiceCall( - monitor="localhost", - service="service", - method="method", - args=[], - kwargs={}, - ) - expected_error = ValueError("boom") - service.callRemote.side_effect = expected_error - - result = [] - dfr = self.ref.run(call) - dfr.addErrback(lambda x: result.append(x)) - - self.services.lookup.assert_called_once_with( - call.service, - call.monitor, - ) - service.callRemote.assert_called_once_with(call.method) - self.assertEqual(len(result), 1) - failure = result[0] - self.assertIsInstance(failure, Failure) - actual_error = failure.value - self.assertIsInstance(actual_error, ValueError) - 
self.logger.error.assert_called_once_with( - "Failed to execute remote method " - "service=%s method=%s id=%s worker=%s error=(%s) %s", - call.service, - call.method, - call.id.hex, - 1, - "ValueError", - expected_error, - ) + t.worker1.remote.callRemote.assert_called_with("reportStatus") + t.worker2.remote.callRemote.assert_called_with("reportStatus") class WorkerAvailabilityQueueTest(TestCase): diff --git a/Products/ZenHub/server/utils.py b/Products/ZenHub/server/utils.py index 583dbedf49..4f2a2167de 100644 --- a/Products/ZenHub/server/utils.py +++ b/Products/ZenHub/server/utils.py @@ -11,10 +11,13 @@ import contextlib import importlib +import logging import sys import traceback +import types + +import six -from Products.ZenUtils.Logger import getLogger as _getLogger from Products.ZenUtils.Utils import ipv6_available from .exceptions import UnknownServiceError @@ -47,10 +50,17 @@ def with_port(port): def getLogger(cls): - if isinstance(cls, basestring): + if isinstance(cls, six.string_types): name = cls.split(".")[-1] - return _getLogger("zenhub.server." + name) - return _getLogger("zenhub.server", cls) + else: + if isinstance(cls, types.InstanceType): + cls = cls.__class__ + elif isinstance(cls, types.ModuleType): + pass # Avoid matching the next elif statement. + elif not isinstance(cls, types.TypeType): + cls = type(cls) + name = cls.__name__.split(".")[-1] + return logging.getLogger("zen.zenhub.server.{}".format(name.lower())) def import_service_class(clspath): diff --git a/Products/ZenHub/server/worker.py b/Products/ZenHub/server/worker.py new file mode 100644 index 0000000000..f08a3ef021 --- /dev/null +++ b/Products/ZenHub/server/worker.py @@ -0,0 +1,118 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2019, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +import logging + +import attr + +from attr.validators import instance_of +from twisted.internet import defer +from twisted.spread import pb + +from Products.ZenHub.errors import RemoteException + +from .service import ServiceRegistry +from .utils import getLogger + + +@attr.s(slots=True, frozen=True) +class Worker(object): + """ + Wraps zenhubworker RemoteReference objects. + """ + + name = attr.ib(converter=str) + """Name of the zenhubworker""" + + remote = attr.ib(validator=instance_of(pb.RemoteReference)) + """Remote reference to the zenhubworker""" + + services = attr.ib(factory=ServiceRegistry) + """Mapping of ZenHub service references used by this worker""" + + @defer.inlineCallbacks + def run(self, call): + """Execute the call. + + @param call: Details on the RPC method to invoke. + @type call: ServiceCall + @raises Exception if an error occurs while attempting to + execute a remote procedure call. An RPC error may occur while + retrieving the remote service reference or when invoking the + job specified method on the remote service reference. 
+ """ + log = getLogger(self) + service = yield self._get_service(call.service, call.monitor, log) + try: + result = yield service.callRemote( + call.method, *call.args, **call.kwargs + ) + log.debug( + "remote method executed service=%s method=%s id=%s worker=%s", + call.service, + call.method, + call.id.hex, + self.name, + ) + defer.returnValue(result) + except (RemoteException, pb.RemoteError) as ex: + if log.isEnabledFor(logging.DEBUG): + log.error( + "remote method failed " + "service=%s method=%s id=%s worker=%s error=%s", + call.service, + call.method, + call.id.hex, + self.name, + ex, + ) + raise + except Exception as ex: + if log.isEnabledFor(logging.DEBUG): + log.error( + "failed to execute remote method " + "service=%s method=%s id=%s worker=%s error=(%s) %s", + call.service, + call.method, + call.id.hex, + self.name, + ex.__class__.__name__, + ex, + ) + raise + + @defer.inlineCallbacks + def _get_service(self, service, monitor, log): + """Retrieve a service reference asynchronously.""" + svcref = self.services.get(monitor, service) + if svcref is None: + try: + svcref = yield self.remote.callRemote( + "getService", service, monitor + ) + self.services.add(monitor, service, svcref) + log.debug( + "retrieved remote service service=%s worker=%s", + service, + self.name, + ) + except Exception as ex: + if log.isEnabledFor(logging.DEBUG): + log.error( + "failed to retrieve remote service " + "service=%s worker=%s error=(%s) %s", + service, + self.name, + ex.__class__.__name__, + ex, + ) + raise + defer.returnValue(svcref) diff --git a/Products/ZenHub/server/workerpool.py b/Products/ZenHub/server/workerpool.py index 70a8631a6d..f6d620865a 100644 --- a/Products/ZenHub/server/workerpool.py +++ b/Products/ZenHub/server/workerpool.py @@ -9,36 +9,34 @@ from __future__ import absolute_import -import collections -import logging +from collections import Container, Iterable, Sized from twisted.internet import defer from twisted.spread import pb from zope.component import adapter, provideHandler -from Products.ZenHub.PBDaemon import RemoteException - from .events import ReportWorkerStatus from .utils import getLogger -class WorkerPool( - collections.Container, collections.Iterable, collections.Sized -): +class WorkerPool(Container, Iterable, Sized): """Pool of ZenHubWorker RemoteReference objects.""" def __init__(self, name): """Initialize a WorkerPool instance. - ZenHubWorker will specify a "queue" to accept tasks from. The - name of the queue is given by the 'name' parameter. + ZenHubWorker will specify a "worklist" to accept tasks from. The + name of the worklist is given by the 'name' parameter. - :param str name: Name of the "queue" associated with this pool. + :param str name: Name of the "worklist" associated with this pool. """ # __available contains workers (by ID) available for work self.__available = WorkerAvailabilityQueue() - self.__workers = {} # Worker refs by worker.sessionId - self.__services = {} # Service refs by worker.sessionId + + # Worker refs by worker.name + # type: {str: Worker} + self.__workers = {} + self.__name = name self.__log = getLogger(self) # Declare a handler for ReportWorkerStatus events @@ -48,99 +46,66 @@ def __init__(self, name): def name(self): return self.__name - def add(self, worker): + def get(self, name, default=None): # type: (name: str) -> Worker | None + """Return the worker having the given name.""" + return self.__workers.get(name, default) + + def add(self, worker): # type: (worker: Worker) -> None """Add a worker to the pool. 
- Note: the worker is expected to have both 'workerId' and - 'sessionId' attributes. + The added worker will replace any existing worker that has the + same `name` attribute value. - @param worker {RemoteReference} A reference to the remote worker + Added workers are not available to hire until `ready` is called. + + @type worker: Products.ZenHub.server.worker.Worker """ - assert not isinstance( - worker, WorkerRef - ), "worker may not be a WorkerRef" - sessionId = worker.sessionId - if sessionId in self.__workers: - self.__log.debug( - "Worker already registered worker=%s", worker.workerId - ) + name = worker.name + replaced = False + stored_worker = self.__workers.get(name) + if stored_worker is worker: return - self.__workers[sessionId] = worker - self.__services[sessionId] = RemoteServiceRegistry(worker) - self.__available.add(sessionId) + elif stored_worker is not None: + replaced = True + self.__discard(name) + self.__workers[name] = worker self.__log.debug( - "Worker registered worker=%s total-workers=%s", - worker.workerId, + "%s worker worker=%s total-workers=%s", + "added" if not replaced else "replaced", + worker.name, len(self.__workers), ) def remove(self, worker): """Remove a worker from the pool. - Note: the worker is expected to have both 'workerId' and - 'sessionId' attributes. - @param worker {RemoteReference} A reference to the remote worker """ - assert not isinstance( - worker, WorkerRef - ), "worker may not be a WorkerRef" - sessionId = worker.sessionId - self.__remove(sessionId, worker=worker) - - def __remove(self, sessionId, worker=None): - if sessionId not in self.__workers: + stored_worker = self.__workers.get(worker.name) + if stored_worker is not worker: self.__log.debug( - "Worker not registered worker=%s", worker.workerId + "cannot remove unknown worker worker=%s", worker.name ) return - if worker is None: - worker = self.__workers[sessionId] - del self.__workers[sessionId] - del self.__services[sessionId] - self.__available.discard(sessionId) + self.__discard(worker.name) self.__log.debug( - "Worker unregistered worker=%s total-workers=%s", - worker.workerId, + "removed worker worker=%s total-workers=%s", + worker.name, len(self.__workers), ) - def __contains__(self, worker): - """Return True if worker is registered, else False is returned. - - Note: the worker is expected to have a 'sessionId' attribute. - - @param worker {RemoteReference} A reference to the remote worker - """ - return self.__workers.get(worker.sessionId) is worker - - def __len__(self): - return len(self.__workers) - - def __iter__(self): - return ( - self.__makeref(worker) - for worker in self.__workers.itervalues() - if worker is not None - ) - - @adapter(ReportWorkerStatus) - def handleReportStatus(self, event): - """Instructs workers to report their status. + def ready(self, worker): + """Make a worker available to hire. - Returns a DeferredList that fires when all the workers have - completed reporting their status. 
+ @param worker: A reference to the remote worker + @type worker: RemoteReference """ - deferreds = [] - for worker in self.__workers.viewvalues(): - dfr = worker.callRemote("reportStatus") - dfr.addErrback( - lambda ex: self.__log.error( - "Failed to report status (%s): %s", worker.workerId, ex - ), - ) - deferreds.append(dfr) - return defer.DeferredList(deferreds) + if worker.name not in self.__workers: + self.__log.debug("retired worker=%s", worker.name) + return + if worker.name not in self.__available: + self.__available.add(worker.name) + self.__log.debug("available to hire worker=%s", worker.name) @property def available(self): @@ -148,172 +113,86 @@ def available(self): return len(self.__available) @defer.inlineCallbacks - def hire(self): + def hire(self): # type: () -> Worker """Return a valid worker. This method blocks until a worker is available. """ while True: - sessionId = yield self.__available.pop() + name = yield self.__available.pop() try: - worker = self.__workers[sessionId] + worker = self.__workers[name] # Ping the worker to test whether it still exists - yield worker.callRemote("ping") - except (pb.PBConnectionLost, pb.DeadReferenceError) as ex: + yield worker.remote.callRemote("ping") + except KeyError: + self.__log.error( + "available worker doesn't exist worker=%s", name + ) + except ( + pb.RemoteError, + pb.PBConnectionLost, + pb.DeadReferenceError, + ) as ex: msg = _bad_worker_messages.get(type(ex)) - self.__log.error(msg, worker.workerId) - self.__remove(sessionId, worker=worker) + self.__log.warning(msg, worker.name) + self.__discard(name) except Exception: - self.__log.exception("Unexpected error") - self.__remove(sessionId) + self.__log.exception("unexpected error") + self.__discard(name) else: - self.__log.debug("Worker hired worker=%s", worker.workerId) - defer.returnValue(self.__makeref(worker)) - - def layoff(self, workerref): - """Make the worker available for hire.""" - worker = workerref.ref - # Verify the worker is the same instance before making it - # available for hire again. - worker_p = self.__workers.get(worker.sessionId) - if worker_p: - self.__log.debug("Worker layed off worker=%s", worker.workerId) - self.__available.add(worker.sessionId) - else: - self.__log.debug("Worker retired worker=%s", worker.workerId) - - def __makeref(self, worker): - return WorkerRef(worker, self.__services[worker.sessionId]) - - -_bad_worker_messages = { - pb.PBConnectionLost: "Worker failed ping test worker=%s", - pb.DeadReferenceError: ( - "Worker no longer available (dead reference) worker=%s" - ), -} - + self.__log.debug("hired worker worker=%s", worker.name) + defer.returnValue(worker) -class RemoteServiceRegistry(object): - """Registry of RemoteReferences to services in zenhubworker.""" - - def __init__(self, worker): - """Initialize a RemoteServiceRegistry instance. + @adapter(ReportWorkerStatus) + def handleReportStatus(self, event): + """Instructs workers to report their status. - :param worker: The ZenHubWorker reference - :type worker: pb.RemoteReference + Returns a DeferredList that fires when all the workers have + completed reporting their status. 
""" - self.__services = {} # (service-name, monitor): service-ref - self.__worker = worker - - def get(self, key, default=None): - return self.__services.get(key, default) - - def __contains__(self, key): - return key in self.__services - - @defer.inlineCallbacks - def lookup(self, service, monitor): - """Retrieve a service reference asynchronously.""" - remoteRef = self.__services.get((service, monitor)) - if remoteRef is None: - remoteRef = yield self.__worker.callRemote( - "getService", service, monitor + deferreds = [] + for worker in self.__workers.viewvalues(): + dfr = worker.remote.callRemote("reportStatus") + dfr.addErrback( + lambda ex, name=worker.name: self.__log.error( + "Failed to report status (%s): %s", name, ex + ), ) - self.__services[(service, monitor)] = remoteRef - defer.returnValue(remoteRef) - + deferreds.append(dfr) + return defer.DeferredList(deferreds) -class WorkerRef(object): - """Wrapper around zenhubworker RemoteReference objects. + def __discard(self, name): + if name in self.__workers: + del self.__workers[name] + self.__available.discard(name) - Used to simplify access to the services associated with a worker. - """ + def __contains__(self, worker): + """Return True if worker is present, else False is returned. - def __init__(self, worker, services): - """ """ - self.__worker = worker - self.__services = services - self.__log = getLogger(self) + Note: the worker is expected to have a 'name' attribute. - @property - def ref(self): - return self.__worker + @param worker {RemoteReference} A reference to the remote worker + """ + return self.__workers.get(worker.name) is worker - @property - def services(self): - return self.__services + def __len__(self): + return len(self.__workers) - def __getattr__(self, name): - return getattr(self.__worker, name) + def __iter__(self): + return ( + worker + for worker in self.__workers.itervalues() + if worker is not None + ) - @defer.inlineCallbacks - def run(self, call): - """Execute the call. - - @param job {ServiceCall} Details on the RPC method to invoke. - @raises Exception if an error occurs while attempting to - execute a remote procedure call. An RPC error may occur while - retrieving the remote service reference or when invoking the - job specified method on the remote service reference. 
- """ - try: - service = yield self.__services.lookup(call.service, call.monitor) - self.__log.debug( - "Retrieved remote service service=%s id=%s worker=%s", - call.service, - call.id, - self.__worker.workerId, - ) - except Exception as ex: - if self.__log.isEnabledFor(logging.DEBUG): - self.__log.error( - "Failed to retrieve remote service " - "service=%s worker=%s error=(%s) %s", - call.service, - self.__worker.workerId, - ex.__class__.__name__, - ex, - ) - raise - try: - result = yield service.callRemote( - call.method, *call.args, **call.kwargs - ) - self.__log.debug( - "Executed remote method service=%s method=%s id=%s worker=%s", - call.service, - call.method, - call.id.hex, - self.__worker.workerId, - ) - defer.returnValue(result) - except (RemoteException, pb.RemoteError) as ex: - if self.__log.isEnabledFor(logging.DEBUG): - self.__log.error( - "Remote method failed " - "service=%s method=%s id=%s worker=%s error=%s", - call.service, - call.method, - call.id.hex, - self.__worker.workerId, - ex, - ) - raise - except Exception as ex: - if self.__log.isEnabledFor(logging.DEBUG): - self.__log.error( - "Failed to execute remote method " - "service=%s method=%s id=%s worker=%s error=(%s) %s", - call.service, - call.method, - call.id.hex, - self.__worker.workerId, - ex.__class__.__name__, - ex, - ) - raise +_bad_worker_messages = { + pb.PBConnectionLost: "worker failed ping test worker=%s", + pb.DeadReferenceError: ( + "worker no longer available (dead reference) worker=%s" + ), + pb.RemoteError: "worker is restarting worker=%s", +} class WorkerAvailabilityQueue(defer.DeferredQueue): @@ -322,6 +201,9 @@ class WorkerAvailabilityQueue(defer.DeferredQueue): def __len__(self): return len(self.pending) + def __contains__(self, item): + return item in self.pending + # Alias pop to get -- DeferredQueue.get removes the value from the queue. pop = defer.DeferredQueue.get diff --git a/Products/ZenHub/services/CommandPerformanceConfig.py b/Products/ZenHub/services/CommandPerformanceConfig.py index 06fe0f0047..25d46e6cc9 100644 --- a/Products/ZenHub/services/CommandPerformanceConfig.py +++ b/Products/ZenHub/services/CommandPerformanceConfig.py @@ -7,13 +7,13 @@ # ############################################################################## -from __future__ import print_function - """CommandPerformanceConfig Provides configuration to zencommand clients. 
""" +from __future__ import print_function + import logging import traceback @@ -134,14 +134,13 @@ def _getComponentConfig(self, comp, device, perfServer, cmds): if not ds.enabled: continue - # Ignore SSH datasources if no username set useSsh = getattr(ds, "usessh", False) if useSsh and not device.zCommandUsername: + # Send an event about no username set self._warnUsernameNotSet(device) - continue - - # clear any lingering no-username events - self._clearUsernameNotSet(device) + else: + # clear any lingering no-username events + self._clearUsernameNotSet(device) parserName = getattr(ds, "parser", "Auto") ploader = getParserLoader(self.dmd, parserName) @@ -253,7 +252,6 @@ def _createDeviceProxy(self, device): if commands: proxy.datasources = list(commands) return proxy - return None def _sendCmdEvent( self, diff --git a/Products/ZenHub/services/DiscoverService.py b/Products/ZenHub/services/DiscoverService.py index a759455415..ca84b57fdf 100644 --- a/Products/ZenHub/services/DiscoverService.py +++ b/Products/ZenHub/services/DiscoverService.py @@ -18,13 +18,13 @@ from Products.Jobber.exceptions import NoSuchJobException from Products.ZenEvents.ZenEventClasses import Status_Ping -from Products.ZenHub.PBDaemon import translateError from Products.ZenModel.Device import manage_createDevice from Products.ZenModel.Exceptions import DeviceExistsError from Products.ZenRelations.ZenPropertyManager import iszprop from Products.ZenRelations.zPropertyCategory import getzPropertyCategory from Products.ZenUtils.IpUtil import strip, ipunwrap, isip +from ..errors import translateError from .ModelerService import ModelerService DEFAULT_PING_THRESH = 168 @@ -88,7 +88,10 @@ def getNetworkName(self): class DiscoverService(ModelerService): @translateError def remote_getNetworks(self, net, includeSubNets): - "Get network objects to scan networks should be in CIDR form 1.1.1.0/24" + """ + Get network objects to scan networks should be in + CIDR form 1.1.1.0/24 + """ netObj = self.dmd.Networks.getNetworkRoot().findNet(net) if not netObj: return None @@ -138,21 +141,22 @@ def sendIpStatusEvent(self, ipobj, sev=2): else: devname = comp = ip self.sendEvent( - dict( - device=devname, - ipAddress=ip, - eventKey=ip, - component=comp, - eventClass=Status_Ping, - summary=msg, - severity=sev, - agent="Discover", - ) + { + "device": devname, + "ipAddress": ip, + "eventKey": ip, + "component": comp, + "eventClass": Status_Ping, + "summary": msg, + "severity": sev, + "agent": "Discover", + } ) @translateError def remote_createDevice(self, ip, force=False, **kw): - """Create a device. + """ + Create a device. @param ip: The manageIp of the device @param kw: The args to manage_createDevice. 
@@ -175,72 +179,7 @@ def remote_createDevice(self, ip, force=False, **kw):
             kw["deviceName"] = ip
             kw["title"] = deviceName
 
-        from Products.ZenModel.Device import getNetworkRoot
-
-        @transact
-        def _doDbWork():
-            """
-            return device object (either new or existing), and flag indicating
-            whether device was newly created, or just updated
-            """
-            try:
-                netroot = getNetworkRoot(
-                    self.dmd, kw.get("performanceMonitor", "localhost")
-                )
-                netobj = netroot.getNet(ip)
-                netmask = 24
-                if netobj is not None:
-                    netmask = netobj.netmask
-                else:
-                    defaultNetmasks = getattr(
-                        netroot, "zDefaultNetworkTree", []
-                    )
-                    if defaultNetmasks:
-                        netmask = defaultNetmasks[0]
-                autoDiscover = getattr(netobj, "zAutoDiscover", True)
-                # If we're not supposed to discover this IP, return None
-                if not force and not autoDiscover:
-                    return None, False
-                kw["manageIp"] = ipunwrap(ip)
-                dev = manage_createDevice(self.dmd, **kw)
-                netroot.createIp(ip, netmask)
-                return dev, True
-            except DeviceExistsError as e:
-                # Update device with latest info from zendisc
-                # (if necessary)
-                if not e.dev.getManageIp():
-                    e.dev.setManageIp(kw["manageIp"])
-
-                # only overwrite title if it has not been set
-                if not e.dev.title or isip(e.dev.title):
-                    if not isip(kw.get("deviceName")):
-                        e.dev.setTitle(kw["deviceName"])
-
-                # copy kw->updateAttributes, to keep kw intact in case
-                # we need to retry transaction
-                updateAttributes = {}
-                for k, v in kw.items():
-                    if k not in (
-                        "manageIp",
-                        "deviceName",
-                        "devicePath",
-                        "discoverProto",
-                        "performanceMonitor",
-                        "productionState",
-                    ):
-                        updateAttributes[k] = v
-                # use updateDevice so we don't clobber existing device properties.
-                e.dev.updateDevice(**updateAttributes)
-                return e.dev, False
-            except Exception as ex:
-                log.exception(
-                    "IP address %s (kw = %s) encountered error",
-                    ipunwrap(ip),
-                    kw,
-                )
-                raise pb.CopyableFailure(ex)
-
-        dev, deviceIsNew = _doDbWork()
+        dev, deviceIsNew = _update_device(self.dmd, ip, force, **kw)
         if dev is not None:
             return self.createDeviceProxy(dev), deviceIsNew
         else:
@@ -302,4 +241,93 @@ def remote_moveDevice(self, dev, path):
     @translateError
     def remote_getDefaultNetworks(self):
         monitor = self.dmd.Monitors.Performance._getOb(self.instance)
-        return [net for net in monitor.discoveryNetworks]
+        return list(monitor.discoveryNetworks)
+
+    @translateError
+    def remote_removeInterfaces(self, net):
+        """
+        Remove from the network's full IP list any IPs that are
+        already assigned to interfaces (device components).
+
+        @param net: the network to discover
+        @return: the network's IPs not assigned to any interface
+        @rtype: list
+        """
+
+        full_ip_list = net.fullIpList()
+
+        for d in self.dmd.Devices.getSubDevicesGen():
+            for interface in d.os.interfaces():
+                for addr in interface.ipaddresses():
+                    ip = addr.getIp()
+                    if net.netmask == addr.netmask and ip in full_ip_list:
+                        full_ip_list.remove(ip)
+
+        return full_ip_list
+
+
+@transact
+def _update_device(dmd, ip, force, **kw):
+    """
+    Return the device object (either new or existing) and a flag
+    indicating whether the device was newly created or just updated.
+    """
+    from Products.ZenModel.Device import getNetworkRoot
+
+    try:
+        monitor = kw.get("performanceMonitor", "localhost")
+        netroot = getNetworkRoot(dmd, monitor)
+        netobj = netroot.getNet(ip)
+        netmask = _get_netmask(netroot, netobj)
+        autoDiscover = getattr(netobj, "zAutoDiscover", True)
+        # If we're not supposed to discover this IP, return None
+        if not force and not autoDiscover:
+            return (None, False)
+        kw["manageIp"] = ipunwrap(ip)
+        dev = manage_createDevice(dmd, **kw)
+        
netroot.createIp(ip, netmask) + return dev, True + except DeviceExistsError as e: + # Update device with latest info from zendisc + # (if necessary) + if not e.dev.getManageIp(): + e.dev.setManageIp(kw["manageIp"]) + + # only overwrite title if it has not been set + if not e.dev.title or isip(e.dev.title): + if not isip(kw.get("deviceName")): + e.dev.setTitle(kw["deviceName"]) + + # copy kw->updateAttributes, to keep kw intact in case + # we need to retry transaction + updateAttributes = {} + for k, v in kw.items(): + if k not in ( + "manageIp", + "deviceName", + "devicePath", + "discoverProto", + "performanceMonitor", + "productionState", + ): + updateAttributes[k] = v + # use updateDevice so we don't clobber existing device + # properties. + e.dev.updateDevice(**updateAttributes) + return e.dev, False + except Exception as ex: + log.exception( + "IP address %s (kw = %s) encountered error", + ipunwrap(ip), + kw, + ) + raise pb.CopyableFailure(ex) + + +def _get_netmask(netroot, netobj): + if netobj is not None: + return netobj.netmask + defaultNetmasks = getattr(netroot, "zDefaultNetworkTree", []) + if defaultNetmasks: + return defaultNetmasks[0] + return 24 diff --git a/Products/ZenHub/services/EventService.py b/Products/ZenHub/services/EventService.py index 7bec21e60a..73b37632fa 100644 --- a/Products/ZenHub/services/EventService.py +++ b/Products/ZenHub/services/EventService.py @@ -13,8 +13,8 @@ from zenoss.protocols.services import ServiceConnectionError from Products.ZenEvents.Event import Event +from Products.ZenHub.errors import translateError from Products.ZenHub.HubService import HubService -from Products.ZenHub.PBDaemon import translateError from Products.Zuul import getFacade from .ThresholdMixin import ThresholdMixin @@ -26,7 +26,7 @@ class EventService(HubService, ThresholdMixin): def __init__(self, dmd, instance): HubService.__init__(self, dmd, instance) - self.config = self.dmd.Monitors.Performance._getOb(self.instance) + self.conf = self.dmd.Monitors.Performance._getOb(self.instance) @translateError def remote_sendEvent(self, evt): diff --git a/Products/ZenHub/services/ModelerService.py b/Products/ZenHub/services/ModelerService.py index 271abcceeb..c184c8da3e 100644 --- a/Products/ZenHub/services/ModelerService.py +++ b/Products/ZenHub/services/ModelerService.py @@ -22,7 +22,7 @@ from Products.DataCollector.Plugins import loadPlugins from Products.ZenCollector.interfaces import IConfigurationDispatchingFilter from Products.ZenEvents import Event -from Products.ZenHub.PBDaemon import translateError +from Products.ZenHub.errors import translateError from Products.ZenHub.services.PerformanceConfig import PerformanceConfig @@ -33,10 +33,6 @@ class ModelerService(PerformanceConfig): plugins = None - def __init__(self, dmd, instance): - PerformanceConfig.__init__(self, dmd, instance) - self.config = self.dmd.Monitors.Performance._getOb(self.instance) - def createDeviceProxy(self, dev, skipModelMsg=""): if self.plugins is None: self.plugins = {} diff --git a/Products/ZenHub/services/PerformanceConfig.py b/Products/ZenHub/services/PerformanceConfig.py index 1f586f1e0e..d4ded80e06 100644 --- a/Products/ZenHub/services/PerformanceConfig.py +++ b/Products/ZenHub/services/PerformanceConfig.py @@ -7,23 +7,25 @@ # ############################################################################## -from Acquisition import aq_parent -from twisted.internet import defer +import logging + +from pynetsnmp import usm +from pynetsnmp.twistedsnmp import AgentProxy from twisted.spread import pb from 
zope import component +from Products.ZenHub.errors import translateError from Products.ZenHub.HubService import HubService from Products.ZenHub.interfaces import IBatchNotifier -from Products.ZenHub.PBDaemon import translateError -from Products.ZenHub.zodb import onUpdate, onDelete -from Products.ZenModel.Device import Device +from Products.ZenHub.zodb import onUpdate from Products.ZenModel.PerformanceConf import PerformanceConf -from Products.ZenModel.privateobject import is_private from Products.ZenModel.ZenPack import ZenPack from .Procrastinator import Procrastinate from .ThresholdMixin import ThresholdMixin +log = logging.getLogger("zen.performanceconfig") + ATTRIBUTES = ( "id", "manageIp", @@ -57,67 +59,77 @@ def __init__(self, device): setattr(self, propertyName, getattr(device, propertyName, None)) self.id = device.id - def __cmp__(self, other): - for propertyName in ATTRIBUTES: - c = cmp(getattr(self, propertyName), getattr(other, propertyName)) - if c != 0: - return c - return 0 + def __eq__(self, other): + if not isinstance(other, SnmpConnInfo): + return False + if self is other: + return True + return all( + getattr(self, name) == getattr(other, name) for name in ATTRIBUTES + ) + + def __lt__(self, other): + if not isinstance(other, SnmpConnInfo): + return NotImplemented + if self is other: + return False + return any( + getattr(self, name) < getattr(other, name) for name in ATTRIBUTES + ) + + def __le__(self, other): + if not isinstance(other, SnmpConnInfo): + return NotImplemented + if self is other: + return True + return not any( + getattr(self, name) > getattr(other, name) for name in ATTRIBUTES + ) def summary(self): - result = "SNMP info for %s at %s:%s" % ( + result = "device=%s peer=%s:%s timeout=%s tries=%d version=%s" % ( self.id, self.manageIp, self.zSnmpPort, - ) - result += " timeout: %s tries: %d" % ( self.zSnmpTimeout, self.zSnmpTries, + self.zSnmpVer, ) - result += " version: %s " % (self.zSnmpVer) if "3" not in self.zSnmpVer: - result += " community: %s" % self.zSnmpCommunity + result += " community=%s" % self.zSnmpCommunity else: - result += " securityName: %s" % self.zSnmpSecurityName - result += " authType: %s" % self.zSnmpAuthType - result += " privType: %s" % self.zSnmpPrivType + result += ( + " securityName=%s authType=%s authPassword=%s" + " privType=%s privPassword=%s engineID=%s" + ) % ( + self.zSnmpSecurityName, + self.zSnmpAuthType, + "****" if self.zSnmpAuthPassword else "", + self.zSnmpPrivType, + "****" if self.zSnmpPrivPassword else "", + "****" if self.zSnmpEngineId else "", + ) return result - def createSession(self, protocol=None, allowCache=False): - "Create a session based on the properties" - from pynetsnmp.twistedsnmp import AgentProxy - - cmdLineArgs = [] + def createSession(self, protocol=None): + """Create a session based on the properties""" if "3" in self.zSnmpVer: - if self.zSnmpPrivType: - cmdLineArgs += ["-l", "authPriv"] - cmdLineArgs += ["-x", self.zSnmpPrivType] - cmdLineArgs += ["-X", self.zSnmpPrivPassword] - elif self.zSnmpAuthType: - cmdLineArgs += ["-l", "authNoPriv"] - else: - cmdLineArgs += ["-l", "noAuthNoPriv"] - if self.zSnmpAuthType: - cmdLineArgs += ["-a", self.zSnmpAuthType] - cmdLineArgs += ["-A", self.zSnmpAuthPassword] - if self.zSnmpEngineId: - cmdLineArgs += ["-e", self.zSnmpEngineId] - cmdLineArgs += ["-u", self.zSnmpSecurityName] - if hasattr(self, "zSnmpContext") and self.zSnmpContext: - cmdLineArgs += ["-n", self.zSnmpContext] - - # the parameter tries seems to really be retries so take one off - retries = 
max(self.zSnmpTries - 1, 0) - p = AgentProxy( - ip=self.manageIp, - port=self.zSnmpPort, + sec = usm.User( + self.zSnmpSecurityName, + auth=usm.Authentication( + self.zSnmpAuthType, self.zSnmpAuthPassword + ), + priv=usm.Privacy(self.zSnmpPrivType, self.zSnmpPrivPassword), + engine=self.zSnmpEngineId, + context=self.zSnmpContext, + ) + else: + sec = usm.Community(self.zSnmpCommunity, version=self.zSnmpVer) + p = AgentProxy.create( + (self.manageIp, self.zSnmpPort), + security=sec, timeout=self.zSnmpTimeout, - tries=retries, - snmpVersion=self.zSnmpVer, - community=self.zSnmpCommunity, - cmdLineArgs=cmdLineArgs, - protocol=protocol, - allowCache=allowCache, + retries=max(self.zSnmpTries - 1, 0), ) p.snmpConnInfo = self return p @@ -132,104 +144,29 @@ def __repr__(self): class PerformanceConfig(HubService, ThresholdMixin): def __init__(self, dmd, instance): HubService.__init__(self, dmd, instance) - self.config = self.dmd.Monitors.Performance._getOb(self.instance) + self.conf = self.dmd.Monitors.Performance._getOb(self.instance) self.procrastinator = Procrastinate(self.pushConfig) self._collectorMap = {} self._notifier = component.getUtility(IBatchNotifier) @translateError def remote_propertyItems(self): - return self.config.propertyItems() - - def notifyAll(self, device): - self.procrastinator.doLater(device) - - def pushConfig(self, device): - deferreds = [] - cfg = None - - cur_collector = device.perfServer.getRelatedId() - prev_collector = self._collectorMap.get(device.id, None) - self._collectorMap[device.id] = cur_collector - - # Always push config to currently assigned collector. - if cur_collector == self.instance: - cfg = self.getDeviceConfig(device) - - # Push a deleteDevice call if the device was previously assigned to - # this collector. - elif prev_collector and prev_collector == self.instance: - cfg = None - - # Don't do anything if this collector is not, and has not been involved - # with the device - else: - return defer.DeferredList(deferreds) - - for listener in self.listeners: - if cfg is None: - deferreds.append( - listener.callRemote("deleteDevice", device.id) - ) - else: - deferreds.append(self.sendDeviceConfig(listener, cfg)) - return defer.DeferredList(deferreds) - - def getDeviceConfig(self, device): - "How to get the config for a device" - return None - - def sendDeviceConfig(self, listener, config): - "How to send the config to a device, probably via callRemote" - pass + return self.conf.propertyItems() @onUpdate(PerformanceConf) - def perfConfUpdated(self, object, event): - if object.id == self.instance: + def perfConfUpdated(self, conf, event): + if conf.id == self.instance: for listener in self.listeners: - listener.callRemote("setPropertyItems", object.propertyItems()) + listener.callRemote("setPropertyItems", conf.propertyItems()) @onUpdate(ZenPack) - def zenPackUpdated(self, object, event): + def zenPackUpdated(self, zenpack, event): for listener in self.listeners: try: listener.callRemote( "updateThresholdClasses", self.remote_getThresholdClasses() ) except Exception: - self.log.warning("Error notifying a listener of new classes") - - @onUpdate(Device) - def deviceUpdated(self, object, event): - self.notifyAll(object) - - @onUpdate(None) # Matches all - def notifyAffectedDevices(self, object, event): - if isinstance(object, Device): - return - - # something else... 
mark the devices as out-of-date - from Products.ZenModel.DeviceClass import DeviceClass - - while object: - # Don't bother with privately managed objects; the ZenPack - # will handle them on its own - if is_private(object): - return - # walk up until you hit an organizer or a device - if isinstance(object, DeviceClass): - uid = (self.__class__.__name__, self.instance) - self._notifier.notify_subdevices(object, uid, self.notifyAll) - break - - if isinstance(object, Device): - self.notifyAll(object) - break - - object = aq_parent(object) - - @onDelete(Device) - def deviceDeleted(self, object, event): - devid = object.id - for listener in self.listeners: - listener.callRemote("deleteDevice", devid) + self.log.warning( + "Error notifying a listener of new threshold classes" + ) diff --git a/Products/ZenHub/services/ProcessConfig.py b/Products/ZenHub/services/ProcessConfig.py index 965c9d5e51..c9151bd84f 100644 --- a/Products/ZenHub/services/ProcessConfig.py +++ b/Products/ZenHub/services/ProcessConfig.py @@ -16,10 +16,6 @@ from Products.ZenCollector.services.config import CollectorConfigService from Products.ZenEvents import Event -from Products.ZenHub.zodb import onUpdate -from Products.ZenModel.OSProcessClass import OSProcessClass -from Products.ZenModel.OSProcessOrganizer import OSProcessOrganizer -from Products.Zuul.catalog.interfaces import IModelCatalogTool # DeviceProxy must be present for twisted PB serialization to work. from Products.ZenCollector.services.config import DeviceProxy # noqa F401 @@ -87,7 +83,7 @@ def _createDeviceProxy(self, device): return None proxy = CollectorConfigService._createDeviceProxy(self, device) - proxy.configCycleInterval = self._prefs.processCycleInterval + proxy.configCycleInterval = self.conf.processCycleInterval proxy.name = device.id proxy.lastmodeltime = device.getLastChangeString() @@ -168,39 +164,6 @@ def _createDeviceProxy(self, device): if proxy.processes: return proxy - @onUpdate(OSProcessClass) - def processClassUpdated(self, object, event): - devices = set() - for process in object.instances(): - device = process.device() - if not device: - continue - device = device.primaryAq() - device_path = device.getPrimaryUrlPath() - if device_path not in devices: - self._notifyAll(device) - devices.add(device_path) - - @onUpdate(OSProcessOrganizer) - def processOrganizerUpdated(self, object, event): - catalog = IModelCatalogTool(object.primaryAq()) - results = catalog.search(OSProcessClass) - if not results.total: - return - devices = set() - for organizer in results: - if results.areBrains: - organizer = organizer.getObject() - for process in organizer.instances(): - device = process.device() - if not device: - continue - device = device.primaryAq() - device_path = device.getPrimaryUrlPath() - if device_path not in devices: - self._notifyAll(device) - devices.add(device_path) - if __name__ == "__main__": from Products.ZenHub.ServiceTester import ServiceTester diff --git a/Products/ZenHub/services/SnmpTrapConfig.py b/Products/ZenHub/services/SnmpTrapConfig.py index 44255b50aa..cb2d7d26fd 100644 --- a/Products/ZenHub/services/SnmpTrapConfig.py +++ b/Products/ZenHub/services/SnmpTrapConfig.py @@ -7,22 +7,22 @@ # ############################################################################## -from __future__ import print_function - """SnmpTrapConfig Provides configuration for an OID translation service. 
""" +from __future__ import absolute_import, print_function + +import json import logging +from hashlib import md5 + +from pynetsnmp import usm from twisted.spread import pb -from Products.ZenCollector.services.config import CollectorConfigService -from Products.ZenHub.zodb import onUpdate, onDelete -from Products.ZenModel.DeviceClass import DeviceClass -from Products.ZenModel.Device import Device -from Products.ZenModel.MibBase import MibBase +from Products.ZenHub.HubService import HubService from Products.Zuul.catalog.interfaces import IModelCatalogTool log = logging.getLogger("zen.HubService.SnmpTrapConfig") @@ -41,77 +41,34 @@ class FakeDevice(object): id = "MIB payload" -class User(pb.Copyable, pb.RemoteCopy): - version = None - engine_id = None - username = None - authentication_type = None # MD5 or SHA - authentication_passphrase = None - privacy_protocol = None # DES or AES - privacy_passphrase = None - - def __str__(self): - fmt = ( - "" - ) - return fmt.format(self) - - -pb.setUnjellyableForClass(User, User) - +class User(usm.User, pb.Copyable, pb.RemoteCopy): + def getStateToCopy(self): + state = pb.Copyable.getStateToCopy(self) + if self.auth is not None: + state["auth"] = [self.auth.protocol.name, self.auth.passphrase] + else: + state["auth"] = None + if self.priv is not None: + state["priv"] = [self.priv.protocol.name, self.priv.passphrase] + else: + state["priv"] = None + return state -class SnmpTrapConfig(CollectorConfigService): + def setCopyableState(self, state): + auth_args = state.get("auth") + state["auth"] = usm.Authentication(*auth_args) if auth_args else None + priv_args = state.get("priv") + state["priv"] = usm.Privacy(*priv_args) if priv_args else None + pb.RemoteCopy.setCopyableState(self, state) - # Override _notifyAll, notifyAffectedDevices, _filterDevice and - # _filterDevicesOnly to guarantee that only one MibConfigTask is ever - # sent down to zentrap. - def _notifyAll(self, object): - pass - - @onUpdate(None) # Matches all - def notifyAffectedDevices(self, object, event): - pass - - def _filterDevice(self, device): - return device.id == FakeDevice.id - - def _filterDevices(self, deviceList): - return [FakeDevice()] - - def _createDeviceProxy(self, device): - proxy = CollectorConfigService._createDeviceProxy(self, device) - proxy.configCycleInterval = 3600 - proxy.name = "SNMP Trap Configuration" - proxy.device = device.id +pb.setUnjellyableForClass(User, User) - # Gather all OID -> Name mappings from /Mibs catalog - proxy.oidMap = dict( - (b.oid, b.id) for b in self.dmd.Mibs.mibSearch() if b.oid - ) - return proxy - - def _create_user(self, obj): - # if v3 and has at least one v3 user property, then we want to - # create a user - if obj.getProperty("zSnmpVer", None) != "v3" or not any( - obj.hasProperty(p) for p in SNMPV3_USER_ZPROPS - ): - return - user = User() - user.version = int(obj.zSnmpVer[1]) - user.engine_id = obj.zSnmpEngineId - user.username = obj.zSnmpSecurityName - user.authentication_type = obj.zSnmpAuthType - user.authentication_passphrase = obj.zSnmpAuthPassword - user.privacy_protocol = obj.zSnmpPrivType - user.privacy_passphrase = obj.zSnmpPrivPassword - return user +class SnmpTrapConfig(HubService): + """ + Configuration service for the zentrap collection daemon. 
+ """ def remote_createAllUsers(self): cat = IModelCatalogTool(self.dmd) @@ -121,14 +78,54 @@ def remote_createAllUsers(self): "Products.ZenModel.DeviceClass.DeviceClass", ) ) - users = [] + users = set() for brain in brains: device = brain.getObject() user = self._create_user(device) if user is not None: - users.append(user) + users.add(user) log.debug("SnmpTrapConfig.remote_createAllUsers %s users", len(users)) - return users + return list(users) + + def remote_getTrapFilters(self, remoteCheckSum): + currentCheckSum = md5(self.zem.trapFilters).hexdigest() # noqa S324 + return ( + (None, None) + if currentCheckSum == remoteCheckSum + else (currentCheckSum, self.zem.trapFilters) + ) + + def remote_getOidMap(self, remoteCheckSum): + oidMap = {b.oid: b.id for b in self.dmd.Mibs.mibSearch() if b.oid} + currentCheckSum = md5( # noqa S324 + json.dumps(oidMap, sort_keys=True).encode("utf-8") + ).hexdigest() + return ( + (None, None) + if currentCheckSum == remoteCheckSum + else (currentCheckSum, oidMap) + ) + + def _create_user(self, obj): + # Users are only valid for SNMP v3. + if obj.getProperty("zSnmpVer", None) != "v3": + return + try: + return User( + obj.zSnmpSecurityName, + auth=usm.Authentication( + obj.zSnmpAuthType, obj.zSnmpAuthPassword + ), + priv=usm.Privacy(obj.zSnmpPrivType, obj.zSnmpPrivPassword), + engine=obj.zSnmpEngineId, + context=obj.zSnmpContext, + ) + except Exception as ex: + log.error( + "failed to create SNMP Security user user=%s error=%s", + obj.zSnmpSecurityName, + ex, + ) def _objectUpdated(self, object): user = self._create_user(object) @@ -136,24 +133,6 @@ def _objectUpdated(self, object): for listener in self.listeners: listener.callRemote("createUser", user) - @onUpdate(DeviceClass) - def deviceClassUpdated(self, object, event): - self._objectUpdated(object) - - @onUpdate(Device) - def deviceUpdated(self, object, event): - self._objectUpdated(object) - - @onUpdate(MibBase) - def mibsChanged(self, device, event): - for listener in self.listeners: - listener.callRemote("notifyConfigChanged") - - @onDelete(MibBase) - def mibsDeleted(self, device, event): - for listener in self.listeners: - listener.callRemote("notifyConfigChanged") - if __name__ == "__main__": from pprint import pprint diff --git a/Products/ZenHub/services/SyslogConfig.py b/Products/ZenHub/services/SyslogConfig.py index 6b04e3b7b3..80b5e1dd23 100644 --- a/Products/ZenHub/services/SyslogConfig.py +++ b/Products/ZenHub/services/SyslogConfig.py @@ -1,51 +1,59 @@ ############################################################################## # -# Copyright (C) Zenoss, Inc. 2011, all rights reserved. +# Copyright (C) Zenoss, Inc. 2011, 2023 all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. # ############################################################################## -from __future__ import print_function - """SyslogConfig Provides configuration for syslog message to Zenoss event conversions. 
""" -import logging +from __future__ import absolute_import, print_function -from Products.ZenCollector.services.config import CollectorConfigService +import logging -log = logging.getLogger("zen.HubService.SyslogConfig") +from hashlib import md5 +from Products.ZenHub.HubService import HubService +from Products.ZenEvents.zensyslog.config import ConfigUpdates -class FakeDevice(object): - id = "Syslog payload" +log = logging.getLogger("zen.hub.services.syslogconfig") -class SyslogConfig(CollectorConfigService): - def _filterDevices(self, deviceList): - return [FakeDevice()] +class SyslogConfig(HubService): + def remote_getConfig(self, checksums): + result = ConfigUpdates() - def _createDeviceProxy(self, device): - proxy = CollectorConfigService._createDeviceProxy(self, device) - proxy.configCycleInterval = 3600 - proxy.name = "Syslog Configuration" - proxy.device = device.id + priority = self.zem.defaultPriority + priority_checksum = _checksum(priority) + if checksums.priority != priority_checksum: + result.priority = priority + result.checksums.priority = priority_checksum - proxy.defaultPriority = self.zem.defaultPriority - return proxy + use_summary = self.zem.syslogSummaryToMessage + use_summary_checksum = _checksum(use_summary) + if checksums.use_summary != use_summary_checksum: + result.use_summary = use_summary + result.checksums.use_summary = use_summary_checksum + parsers = self.zem.syslogParsers + parsers_checksum = _checksum(parsers) + if checksums.parsers != parsers_checksum: + result.parsers = parsers + result.checksums.parsers = parsers_checksum -if __name__ == "__main__": - from Products.ZenHub.ServiceTester import ServiceTester + rules = self.zem.syslogMsgEvtFieldFilterRules + rules_checksum = _checksum(rules) + if checksums.rules != rules_checksum: + result.rules = rules + result.checksums.rules = rules_checksum - tester = ServiceTester(SyslogConfig) + return result - def printer(config): - print("Default syslog priority = ", config.defaultPriority) - tester.printDeviceProxy = printer - tester.showDeviceInfo() +def _checksum(value): + return md5(str(value)).hexdigest() # noqa: S324 diff --git a/Products/ZenHub/services/ThresholdMixin.py b/Products/ZenHub/services/ThresholdMixin.py index 31093a9c30..9f42e0e687 100644 --- a/Products/ZenHub/services/ThresholdMixin.py +++ b/Products/ZenHub/services/ThresholdMixin.py @@ -7,30 +7,45 @@ # ############################################################################## -from Products.ZenHub.PBDaemon import translateError +import logging +from Products.ZenHub.errors import translateError +from Products.ZenModel.MinMaxThreshold import MinMaxThreshold +from Products.ZenModel.ValueChangeThreshold import ValueChangeThreshold -class ThresholdMixin: +log = logging.getLogger("zen.thresholdmixin") + + +class ThresholdMixin(object): _cached_thresholdClasses = [] @translateError def remote_getThresholdClasses(self): - if not self._cached_thresholdClasses: - from Products.ZenModel.MinMaxThreshold import MinMaxThreshold - from Products.ZenModel.ValueChangeThreshold import ( - ValueChangeThreshold, + log.info("retrieving threshold classes") + try: + if not self._cached_thresholdClasses: + classes = [MinMaxThreshold, ValueChangeThreshold] + for pack in self.dmd.ZenPackManager.packs(): + classes += pack.getThresholdClasses() + self._cached_thresholdClasses = map( + lambda c: c.__module__, classes + ) + return self._cached_thresholdClasses + finally: + log.info( + "retrieved threshold classes: %s", + self._cached_thresholdClasses, ) - classes = 
[MinMaxThreshold, ValueChangeThreshold] - for pack in self.dmd.ZenPackManager.packs(): - classes += pack.getThresholdClasses() - self._cached_thresholdClasses = map( - lambda c: c.__module__, classes - ) - return self._cached_thresholdClasses - @translateError def remote_getCollectorThresholds(self): from Products.ZenModel.BuiltInDS import BuiltInDS - return self.config.getThresholdInstances(BuiltInDS.sourcetype) + log.info("retrieving threshold instances") + instances = None + try: + instances = self.conf.getThresholdInstances(BuiltInDS.sourcetype) + finally: + log.info("retrieved threshold instances: %s", instances) + + return instances diff --git a/Products/ZenHub/services/ZenStatusConfig.py b/Products/ZenHub/services/ZenStatusConfig.py index a48e20525b..8d85e405d2 100644 --- a/Products/ZenHub/services/ZenStatusConfig.py +++ b/Products/ZenHub/services/ZenStatusConfig.py @@ -72,7 +72,7 @@ def _filterDevice(self, device): def _createDeviceProxy(self, device): proxy = CollectorConfigService._createDeviceProxy(self, device) - proxy.configCycleInterval = self._prefs.statusCycleInterval + proxy.configCycleInterval = self.conf.statusCycleInterval # add each component proxy.components = [] diff --git a/Products/ZenHub/tests/testPBDaemon.py b/Products/ZenHub/tests/testPBDaemon.py index 0fb2987bf6..fa4bd1dbd0 100644 --- a/Products/ZenHub/tests/testPBDaemon.py +++ b/Products/ZenHub/tests/testPBDaemon.py @@ -11,7 +11,7 @@ import os from twisted.internet.defer import failure -from zope.interface import implements +from zope.interface import implementer from zope.component import getGlobalSiteManager from Products.ZenTestCase.BaseTestCase import BaseTestCase @@ -19,22 +19,22 @@ ICollectorEventTransformer, TRANSFORM_DROP, ) -from Products.ZenHub.PBDaemon import ( - DeDupingEventQueue, - DequeEventQueue, - EventQueueManager, +from Products.ZenHub.PBDaemon import PBDaemon +from Products.ZenHub.events.queue.manager import EventQueueManager +from Products.ZenHub.events.queue.deduping import DeDupingEventQueue +from Products.ZenHub.events.queue.deque import DequeEventQueue +from Products.ZenHub.events.queue.fingerprint import ( DefaultFingerprintGenerator, - PBDaemon, ) -_TEST_EVENT = dict( - device="device1", - component="component1", - eventClass="/MyEventClass", - eventKey="MyEventKey", - severity=5, - summary="My summary", -) +_TEST_EVENT = { + "device": "device1", + "component": "component1", + "eventClass": "/MyEventClass", + "eventKey": "MyEventKey", + "severity": 5, + "summary": "My summary", +} log = logging.getLogger("zen.testPBDaemon") @@ -95,7 +95,7 @@ def __init__(self, *args, **kwargs): ) def testDeDuping(self): - for i in range(100): + for _ in range(100): self.queue.append(createTestEvent(mydetail="detailvalue")) self.assertEquals(1, len(self.queue)) queued = list(self.queue) @@ -170,9 +170,8 @@ class MockOptions(object): return options def testAddEventDroppedTransform(self): + @implementer(ICollectorEventTransformer) class DroppingTransformer(object): - implements(ICollectorEventTransformer) - def __init__(self): self.num_dropped = 0 @@ -184,7 +183,7 @@ def transform(self, event): self.gsm.registerUtility(transformer) try: eqm = EventQueueManager(self.createOptions(), log) - for i in range(5): + for _ in range(5): eqm.addEvent(createTestEvent()) finally: self.gsm.unregisterUtility(transformer) @@ -203,13 +202,13 @@ def testDiscarded(self): def testNoDuplicateClears(self): eqm = EventQueueManager(self.createOptions(), log) - for i in range(5): + for _ in range(5): 
eqm.addEvent(createTestEvent(severity=0)) self.assertEquals(1, len(eqm.event_queue)) sent_events = [] eqm.sendEvents(lambda evts: sent_events.extend(evts)) - for i in range(5): + for _ in range(5): eqm.addEvent(createTestEvent(severity=0)) self.assertEquals(0, len(eqm.event_queue)) @@ -221,7 +220,7 @@ def testDuplicateClears(self): def send_events(evts): sent_events.extend(evts) - for i in range(5): + for _ in range(5): eqm.addEvent(createTestEvent(severity=0)) eqm.sendEvents(send_events) self.assertEquals(5, len(sent_events)) @@ -236,7 +235,7 @@ def send_events(evts): sent_events.extend(evts) eqm = EventQueueManager(opts, log) - for i in range(10): + for _ in range(10): eqm.addEvent(createTestEvent(severity=0)) eqm.sendEvents(send_events) self.assertEquals(2, len(sent_events)) @@ -432,22 +431,25 @@ def put(self, *args): class TestMetricWriter(BaseTestCase): def setUp(self): os.environ["CONTROLPLANE"] = "0" - self.daemon = PBDaemon() - self.daemon._publisher = Publisher() + self.publisher = Publisher() + self.daemon = PBDaemon(publisher=self.publisher) self.metric_writer = self.daemon.metricWriter() def testWriteMetric(self): metric = ["name", 0.0, "now", {}] self.metric_writer.write_metric(*metric) - self.assertEquals([tuple(metric)], self.daemon._publisher.queue) + self.assertEquals([tuple(metric)], self.publisher.queue) class TestInternalMetricWriter(BaseTestCase): def setUp(self): os.environ["CONTROLPLANE"] = "1" - self.daemon = PBDaemon() - self.daemon._publisher = Publisher() - self.daemon._internal_publisher = Publisher() + self.publisher = Publisher() + self.internal_publisher = Publisher() + self.daemon = PBDaemon( + publisher=self.publisher, + internal_publisher=self.internal_publisher, + ) self.metric_writer = self.daemon.metricWriter() def testWriteInternalMetric(self): @@ -456,20 +458,19 @@ def testWriteInternalMetric(self): self.metric_writer.write_metric(*metric) self.metric_writer.write_metric(*internal_metric) self.assertEquals( - [tuple(internal_metric)], self.daemon._internal_publisher.queue + [tuple(internal_metric)], self.internal_publisher.queue ) self.assertEquals( - [tuple(metric), tuple(internal_metric)], - self.daemon._publisher.queue, + [tuple(metric), tuple(internal_metric)], self.publisher.queue ) def testInternalPublisherIsNone(self): - self.daemon._internal_publisher = None + self.daemon.setInternalPublisher(None) del os.environ["CONTROLPLANE_CONSUMER_URL"] self.assertIsNone(self.daemon.internalPublisher()) def testInternalPublisherIsInstance(self): - self.daemon._internal_publisher = None + self.daemon.setInternalPublisher(None) os.environ["CONTROLPLANE_CONSUMER_URL"] = "http://localhost" publisher = self.daemon.internalPublisher() from Products.ZenHub.metricpublisher.publisher import HttpPostPublisher diff --git a/Products/ZenHub/tests/test_HubService.py b/Products/ZenHub/tests/test_HubService.py index 1c6ff22e9a..528f2c6427 100644 --- a/Products/ZenHub/tests/test_HubService.py +++ b/Products/ZenHub/tests/test_HubService.py @@ -18,7 +18,9 @@ def test_init(self): self.assertIsInstance(self.hub_service, pb.Referenceable) # Validate attributes created by __init__ - self.assertEqual(self.hub_service.log, logging.getLogger("zen.hub")) + self.assertEqual( + self.hub_service.log, logging.getLogger("zen.hub.hubservice") + ) self.assertEqual(self.hub_service.fqdn, socket.getfqdn()) self.assertEqual(self.hub_service.dmd, self.dmd) self.assertEqual(self.hub_service.zem, self.dmd.ZenEventManager) diff --git a/Products/ZenHub/tests/test_PBDaemon.py 
b/Products/ZenHub/tests/test_PBDaemon.py index 3f845f1161..6ff97d0199 100644 --- a/Products/ZenHub/tests/test_PBDaemon.py +++ b/Products/ZenHub/tests/test_PBDaemon.py @@ -2,888 +2,30 @@ import sys from unittest import TestCase -from mock import Mock, patch, create_autospec, call, sentinel - -from zope.interface.verify import verifyObject +from mock import ANY, Mock, patch, create_autospec, call # Breaks Test Isolation. Products/ZenHub/metricpublisher/utils.py:15 # ImportError: No module named eventlet from Products.ZenHub.PBDaemon import ( - BaseEventQueue, - Clear, collections, - ConflictError, - DeDupingEventQueue, - DefaultFingerprintGenerator, defer, - DequeEventQueue, - EventQueueManager, - ICollectorEventFingerprintGenerator, - _load_utilities, - pb, PBDaemon, - RemoteBadMonitor, - RemoteConflictError, - RemoteException, - sha1, - TRANSFORM_DROP, - TRANSFORM_STOP, - translateError, + publisher, ) PATH = {"src": "Products.ZenHub.PBDaemon"} -class RemoteExceptionsTest(TestCase): - """These exceptions can probably be moved into their own module""" - - def test_raise_RemoteException(t): - with t.assertRaises(RemoteException): - raise RemoteException("message", "traceback") - - def test_RemoteException_is_pb_is_copyable(t): - t.assertTrue(issubclass(RemoteException, pb.Copyable)) - t.assertTrue(issubclass(RemoteException, pb.RemoteCopy)) - - def test_raise_RemoteConflictError(t): - with t.assertRaises(RemoteConflictError): - raise RemoteConflictError("message", "traceback") - - def test_RemoteConflictError_is_pb_is_copyable(t): - t.assertTrue(issubclass(RemoteConflictError, pb.Copyable)) - t.assertTrue(issubclass(RemoteConflictError, pb.RemoteCopy)) - - def test_raise_RemoteBadMonitor(t): - with t.assertRaises(RemoteBadMonitor): - raise RemoteBadMonitor("message", "traceback") - - def test_RemoteBadMonitor_is_pb_is_copyable(t): - t.assertTrue(issubclass(RemoteBadMonitor, pb.Copyable)) - t.assertTrue(issubclass(RemoteBadMonitor, pb.RemoteCopy)) - - def test_translateError_transforms_ConflictError(t): - traceback = Mock(spec_set=["_p_oid"]) - - @translateError - def raise_conflict_error(): - raise ConflictError("message", traceback) - - with t.assertRaises(RemoteConflictError): - raise_conflict_error() - - def test_translateError_transforms_Exception(t): - @translateError - def raise_error(): - raise Exception("message", "traceback") - - with t.assertRaises(RemoteException): - raise_error() - - -class DefaultFingerprintGeneratorTest(TestCase): - def test_init(t): - fingerprint_generator = DefaultFingerprintGenerator() - - # the class Implements the Interface - t.assertTrue( - ICollectorEventFingerprintGenerator.implementedBy( - DefaultFingerprintGenerator - ) - ) - # the object provides the interface - t.assertTrue( - ICollectorEventFingerprintGenerator.providedBy( - fingerprint_generator - ) - ) - # Verify the object implments the interface properly - verifyObject( - ICollectorEventFingerprintGenerator, fingerprint_generator - ) - - def test_generate(t): - """Takes an event, chews it up and spits out a sha1 hash - without an intermediate function that returns its internal fields list - we have to duplicate the entire function in test. - REFACTOR: split this up so we can test the fields list generator - and sha generator seperately. 
- Any method of generating the a hash from the dict should work so long - as its the same hash for the event with the _IGNORE_FILEDS stripped off - """ - event = {"k%s" % i: "v%s" % i for i in range(3)} - fields = [] - for k, v in sorted(event.iteritems()): - fields.extend((k, v)) - expected = sha1("|".join(fields)).hexdigest() - - # any keys listed in _IGNORE_FIELDS are not hashed - for key in DefaultFingerprintGenerator._IGNORE_FIELDS: - event[key] = "IGNORE ME!" - - fingerprint_generator = DefaultFingerprintGenerator() - out = fingerprint_generator.generate(event) - - t.assertEqual(out, expected) - - -class load_utilities_Test(TestCase): - @patch("{src}.getUtilitiesFor".format(**PATH), autospec=True) - def test_load_utilities(t, getUtilitiesFor): - ICollectorEventTransformer = "some transform function" - - def func1(): - pass - - def func2(): - pass - - func1.weight = 100 - func2.weight = 50 - getUtilitiesFor.return_value = (("func1", func1), ("func2", func2)) - - ret = _load_utilities(ICollectorEventTransformer) - - getUtilitiesFor.assert_called_with(ICollectorEventTransformer) - # NOTE: lower weight comes first in the sorted list - # Is this intentional? - t.assertEqual(ret, [func2, func1]) - - -class BaseEventQueueTest(TestCase): - def setUp(t): - t.beq = BaseEventQueue(maxlen=5) - - def test_init(t): - base_event_queue = BaseEventQueue(maxlen=5) - t.assertEqual(base_event_queue.maxlen, 5) - - def test_append(t): - with t.assertRaises(NotImplementedError): - t.beq.append("event") - - def test_popleft(t): - with t.assertRaises(NotImplementedError): - t.beq.popleft() - - def test_extendleft(t): - with t.assertRaises(NotImplementedError): - t.beq.extendleft(["event_a", "event_b"]) - - def test___len__(t): - with t.assertRaises(NotImplementedError): - len(t.beq) - - def test___iter__(t): - with t.assertRaises(NotImplementedError): - [i for i in t.beq] - - -class DequeEventQueueTest(TestCase): - def setUp(t): - t.deq = DequeEventQueue(maxlen=10) - t.event_a, t.event_b = {"name": "event_a"}, {"name": "event_b"} - - def test_init(t): - maxlen = 100 - deq = DequeEventQueue(maxlen=maxlen) - t.assertEqual(deq.maxlen, maxlen) - t.assertIsInstance(deq.queue, collections.deque) - - @patch("{src}.time".format(**PATH)) - def test_append(t, time): - event = {} - deq = DequeEventQueue(maxlen=10) - - ret = deq.append(event) - - # append sets the time the event was added to the queue - t.assertEqual(event["rcvtime"], time.time()) - t.assertEqual(ret, None) - - def test_append_pops_and_returns_leftmost_if_full(t): - event_a, event_b = {"name": "event_a"}, {"name": "event_b"} - deq = DequeEventQueue(maxlen=1) - - deq.append(event_a) - ret = deq.append(event_b) - - t.assertIn(event_b, deq.queue) - t.assertNotIn(event_a, deq.queue) - t.assertEqual(ret, event_a) - - @patch("{src}.time".format(**PATH)) - def test_popleft(t, time): - t.deq.append(t.event_a) - t.deq.append(t.event_b) - - ret = t.deq.queue.popleft() - - t.assertEqual(ret, t.event_a) - - def test_base_popleft(t): - t.deq.queue.append("a") - t.deq.queue.append("b") - - ret = t.deq.queue.popleft() - t.assertEqual(ret, "a") - - @patch("{src}.time".format(**PATH)) - def test_extendleft(t, time): - """WARNING: extendleft does NOT add timestamps, as .append does - is this behavior is intentional? 
- """ - event_c = {"name": "event_c"} - t.deq.append(event_c) - t.assertEqual(list(t.deq), [event_c]) - events = [t.event_a, t.event_b] - - ret = t.deq.extendleft(events) - - t.assertEqual(ret, []) - t.assertEqual(list(t.deq), [t.event_a, t.event_b, event_c]) - """ - # to validate all events get timestamps - t.assertEqual( - list(t.deq), - [{'name': 'event_a', 'rcvtime': time.time.return_value}, - {'name': 'event_b', 'rcvtime': time.time.return_value}, - {'name': 'event_c', 'rcvtime': time.time.return_value}, - ] - """ - - def test_extendleft_returns_events_if_falsey(t): - ret = t.deq.extendleft(False) - t.assertEqual(ret, False) - ret = t.deq.extendleft([]) - t.assertEqual(ret, []) - ret = t.deq.extendleft(0) - t.assertEqual(ret, 0) - - def test_extendleft_returns_extra_events_if_nearly_full(t): - t.deq.maxlen = 3 - t.deq.extendleft([t.event_a, t.event_b]) - event_c, event_d = {"name": "event_c"}, {"name": "event_d"} - events = [event_c, event_d] - - ret = t.deq.extendleft(events) - - t.assertEqual(list(t.deq), [event_d, t.event_a, t.event_b]) - t.assertEqual(ret, [event_c]) - - def test___len__(t): - ret = len(t.deq) - t.assertEqual(ret, 0) - t.deq.extendleft([t.event_a, t.event_b]) - t.assertEqual(len(t.deq), 2) - - def test___iter__(t): - t.deq.extendleft([t.event_a, t.event_b]) - ret = [event for event in t.deq] - t.assertEqual(ret, [t.event_a, t.event_b]) - - -class DeDupingEventQueueTest(TestCase): - def setUp(t): - t.ddeq = DeDupingEventQueue(maxlen=10) - t.event_a, t.event_b = {"name": "event_a"}, {"name": "event_b"} - - @patch("{src}._load_utilities".format(**PATH)) - def test_init(t, _load_utilities): - ddeq = DeDupingEventQueue(maxlen=10) - t.assertEqual(ddeq.maxlen, 10) - - t.assertIsInstance( - ddeq.default_fingerprinter, DefaultFingerprintGenerator - ) - t.assertEqual(ddeq.fingerprinters, _load_utilities.return_value) - t.assertIsInstance(ddeq.queue, collections.OrderedDict) - - def test_event_fingerprint(t): - t.ddeq.fingerprinters = [] - - ret = t.ddeq._event_fingerprint(t.event_a) - expected = DefaultFingerprintGenerator().generate(t.event_a) - t.assertEqual(ret, expected) - - # Identical events generate the same fingerprint - event_2 = t.event_a.copy() - ret = t.ddeq._event_fingerprint(event_2) - t.assertEqual(ret, expected) - - def test_event_fingerprint_fingerprinters_list(t): - """_event_fingerprint will attempt to generate a fingerprint from - each ICollectorEventFingerprintGenerator it loaded, - and return the first non-falsey value from them - """ - fp1 = Mock(spec_set=["generate"]) - fp1.generate.return_value = None - fp2 = Mock(spec_set=["generate"]) - fp2.generate.side_effect = lambda x: str(x) - # fp2 returns a value, so fp3 is never called - fp3 = Mock(spec_set=["generate"]) - fp3.generate.side_effect = lambda x: 1 / 0 - - t.ddeq.fingerprinters = [fp1, fp2, fp3] - - ret = t.ddeq._event_fingerprint(t.event_a) - - fp1.generate.assert_called_with(t.event_a) - fp2.generate.assert_called_with(t.event_a) - fp3.generate.assert_not_called() - t.assertEqual(ret, str(t.event_a)) - - def test_first_time(t): - """given 2 events, retrun the earliest timestamp of the two - use 'firstTime' if available, else 'rcvtime' - """ - event1 = {"firstTime": 1, "rcvtime": 0} - event2 = {"rcvtime": 2} - - ret = t.ddeq._first_time(event1, event2) - t.assertEqual(ret, 1) - - event1 = {"firstTime": 3, "rcvtime": 1} - event2 = {"rcvtime": 2} - - ret = t.ddeq._first_time(event1, event2) - t.assertEqual(ret, 2) - - @patch("{src}.time".format(**PATH)) - def test_append_timestamp(t, time): - 
"""Make sure every processed event specifies the time it was queued.""" - t.ddeq.append(t.event_a) - event = t.ddeq.popleft() - - t.assertEqual(event["rcvtime"], time.time.return_value) - - @patch("{src}.time".format(**PATH)) - def test_append_deduplication(t, time): - """The same event cannot be added to the queue twice - appending a duplicate event replaces the original - """ - event1 = {"data": "some data"} - event2 = {"data": "some data"} - t.assertEqual(event1, event2) - - t.ddeq.append(event1) - t.ddeq.append(event2) - - t.assertEqual(len(t.ddeq), 1) - - ret = t.ddeq.popleft() - # The new event replaces the old one - t.assertIs(ret, event2) - t.assertEqual(event2["count"], 2) - - @patch("{src}.time".format(**PATH)) - def test_append_deduplicates_and_counts_events(t, time): - time.time.side_effect = (t for t in range(100)) - t.ddeq.append({"name": "event_a"}) - t.assertEqual(list(t.ddeq), [{"rcvtime": 0, "name": "event_a"}]) - t.ddeq.append({"name": "event_a"}) - t.assertEqual( - list(t.ddeq), - [{"rcvtime": 1, "firstTime": 0, "count": 2, "name": "event_a"}], - ) - t.ddeq.append({"name": "event_a"}) - t.assertEqual( - list(t.ddeq), - [{"rcvtime": 2, "firstTime": 0, "count": 3, "name": "event_a"}], - ) - t.ddeq.append({"name": "event_a"}) - t.assertEqual( - list(t.ddeq), - [{"rcvtime": 3, "firstTime": 0, "count": 4, "name": "event_a"}], - ) - - def test_append_pops_and_returns_leftmost_if_full(t): - t.ddeq.maxlen = 1 - - t.ddeq.append(t.event_a) - ret = t.ddeq.append(t.event_b) - - # NOTE: events are stored in a dict, key=fingerprint - t.assertIn(t.ddeq._event_fingerprint(t.event_b), t.ddeq.queue) - t.assertNotIn(t.ddeq._event_fingerprint(t.event_a), t.ddeq.queue) - t.assertEqual(ret, t.event_a) - - def test_popleft(t): - t.ddeq.append(t.event_a) - t.ddeq.append(t.event_b) - - ret = t.ddeq.popleft() - - t.assertEqual(ret, t.event_a) - - def test_popleft_raises_IndexError(t): - """Raises IndexError instead of KeyError, for api compatability""" - with t.assertRaises(IndexError): - t.ddeq.popleft() - - @patch("{src}.time".format(**PATH)) - def test_extendleft(t, time): - """WARNING: extendleft does NOT add timestamps, as .append does - is this behavior is intentional? 
- """ - event_c = {"name": "event_c"} - t.ddeq.append(event_c) - t.assertEqual(list(t.ddeq), [event_c]) - events = [t.event_a, t.event_b] - - ret = t.ddeq.extendleft(events) - - t.assertEqual(ret, []) - t.assertEqual(list(t.ddeq), [t.event_a, t.event_b, event_c]) - """ - # to validate all events get timestamps - t.assertEqual( - list(t.ddeq), - [{'name': 'event_a', 'rcvtime': time.time.return_value}, - {'name': 'event_b', 'rcvtime': time.time.return_value}, - {'name': 'event_c', 'rcvtime': time.time.return_value}, - ] - ) - """ - - @patch("{src}.time".format(**PATH)) - def test_extendleft_counts_events_BUG(t, time): - time.time.side_effect = (t for t in range(100)) - t.ddeq.extendleft([{"name": "event_a"}, {"name": "event_b"}]) - t.assertEqual( - list(t.ddeq), - # This should work - # [{'rcvtime': 0, 'name': 'event_a'}] - # current behavior - [{"name": "event_a"}, {"name": "event_b"}], - ) - # rcvtime is required, but is not set by extendleft - with t.assertRaises(KeyError): - t.ddeq.extendleft([{"name": "event_a"}, {"name": "event_b"}]) - """ - Test Breaks Here due to missing rcvtime - t.assertEqual( - list(t.ddeq), - [{'rcvtime': 1, 'firstTime': 0, 'count': 2, 'name': 'event_a'}, - {'rcvtime': 1, 'firstTime': 0, 'count': 2, 'name': 'event_b'}] - ) - t.ddeq.extendleft([{'name': 'event_a'}, {'name': 'event_b'}]) - t.assertEqual( - list(t.ddeq), - [{'rcvtime': 2, 'firstTime': 0, 'count': 3, 'name': 'event_a'}, - {'rcvtime': 2, 'firstTime': 0, 'count': 3, 'name': 'event_b'}] - ) - t.ddeq.extendleft([{'name': 'event_a'}, {'name': 'event_b'}]) - t.assertEqual( - list(t.ddeq), - [{'rcvtime': 3, 'firstTime': 0, 'count': 4, 'name': 'event_a'}, - {'rcvtime': 3, 'firstTime': 0, 'count': 4, 'name': 'event_b'}] - ) - """ - - def test_extendleft_returns_events_if_empty(t): - ret = t.ddeq.extendleft([]) - t.assertEqual(ret, []) - - def test_extendleft_returns_extra_events_if_nearly_full(t): - t.ddeq.maxlen = 3 - t.ddeq.extendleft([t.event_a, t.event_b]) - event_c, event_d = {"name": "event_c"}, {"name": "event_d"} - events = [event_c, event_d] - - ret = t.ddeq.extendleft(events) - - t.assertEqual(list(t.ddeq), [event_d, t.event_a, t.event_b]) - t.assertEqual(ret, [event_c]) - - def test___len__(t): - ret = len(t.ddeq) - t.assertEqual(ret, 0) - t.ddeq.extendleft([t.event_a, t.event_b]) - t.assertEqual(len(t.ddeq), 2) - - def test___iter__(t): - t.ddeq.extendleft([t.event_a, t.event_b]) - ret = [event for event in t.ddeq] - t.assertEqual(ret, [t.event_a, t.event_b]) - - -class EventQueueManagerTest(TestCase): - def setUp(t): - options = Mock( - name="options", - spec_set=[ - "maxqueuelen", - "deduplicate_events", - "allowduplicateclears", - "duplicateclearinterval", - "eventflushchunksize", - ], - ) - options.deduplicate_events = True - log = Mock(name="logger.log", spec_set=["debug", "warn"]) - - t.eqm = EventQueueManager(options, log) - t.eqm._initQueues() - - def test_initQueues(t): - options = Mock( - name="options", spec_set=["maxqueuelen", "deduplicate_events"] - ) - options.deduplicate_events = True - log = Mock(name="logger.log", spec_set=[]) - - eqm = EventQueueManager(options, log) - eqm._initQueues() - - t.assertIsInstance(eqm.event_queue, DeDupingEventQueue) - t.assertEqual(eqm.event_queue.maxlen, options.maxqueuelen) - t.assertIsInstance(eqm.perf_event_queue, DeDupingEventQueue) - t.assertEqual(eqm.perf_event_queue.maxlen, options.maxqueuelen) - t.assertIsInstance(eqm.heartbeat_event_queue, collections.deque) - t.assertEqual(eqm.heartbeat_event_queue.maxlen, 1) - - def 
test_transformEvent(t): - """a transformer mutates and returns an event""" - - def transform(event): - event["transformed"] = True - return event - - transformer = Mock(name="transformer", spec_set=["transform"]) - transformer.transform.side_effect = transform - t.eqm.transformers = [transformer] - - event = {} - ret = t.eqm._transformEvent(event) - - t.assertEqual(ret, event) - t.assertEqual(event, {"transformed": True}) - - def test_transformEvent_drop(t): - """if a transformer returns TRANSFORM_DROP - stop running the event through transformer, and return None - """ - - def transform_drop(event): - return TRANSFORM_DROP - - def transform_bomb(event): - 0 / 0 - - transformer = Mock(name="transformer", spec_set=["transform"]) - transformer.transform.side_effect = transform_drop - transformer_2 = Mock(name="transformer", spec_set=["transform"]) - transformer_2.transform.side_effect = transform_bomb - - t.eqm.transformers = [transformer, transformer_2] - - event = {} - ret = t.eqm._transformEvent(event) - t.assertEqual(ret, None) - - def test_transformEvent_stop(t): - """if a transformer returns TRANSFORM_STOP - stop running the event through transformers, and return the event - """ - - def transform_drop(event): - return TRANSFORM_STOP - - def transform_bomb(event): - 0 / 0 - - transformer = Mock(name="transformer", spec_set=["transform"]) - transformer.transform.side_effect = transform_drop - transformer_2 = Mock(name="transformer", spec_set=["transform"]) - transformer_2.transform.side_effect = transform_bomb - - t.eqm.transformers = [transformer, transformer_2] - - event = {} - ret = t.eqm._transformEvent(event) - t.assertIs(ret, event) - - def test_clearFingerprint(t): - event = {k: k + "_v" for k in t.eqm.CLEAR_FINGERPRINT_FIELDS} - - ret = t.eqm._clearFingerprint(event) - - t.assertEqual( - ret, ("device_v", "component_v", "eventKey_v", "eventClass_v") - ) - - def test__removeDiscardedEventFromClearState(t): - """if the event's fingerprint is in clear_events_count - decrement its value - """ - t.eqm.options.allowduplicateclears = False - t.eqm.options.duplicateclearinterval = 0 - - discarded = {"severity": Clear} - clear_fingerprint = t.eqm._clearFingerprint(discarded) - t.eqm.clear_events_count[clear_fingerprint] = 3 - - t.eqm._removeDiscardedEventFromClearState(discarded) - - t.assertEqual(t.eqm.clear_events_count[clear_fingerprint], 2) - - def test__addEvent(t): - """remove the event from clear_events_count - and append it to the queue - """ - t.eqm.options.allowduplicateclears = False - - queue = Mock(name="queue", spec_set=["append"]) - event = {} - clear_fingerprint = t.eqm._clearFingerprint(event) - t.eqm.clear_events_count = {clear_fingerprint: 3} - - t.eqm._addEvent(queue, event) - - t.assertNotIn(clear_fingerprint, t.eqm.clear_events_count) - queue.append.assert_called_with(event) - - def test__addEvent_status_clear(t): - t.eqm.options.allowduplicateclears = False - t.eqm.options.duplicateclearinterval = 0 - - queue = Mock(name="queue", spec_set=["append"]) - event = {"severity": Clear} - clear_fingerprint = t.eqm._clearFingerprint(event) - - t.eqm._addEvent(queue, event) - - t.assertEqual(t.eqm.clear_events_count[clear_fingerprint], 1) - queue.append.assert_called_with(event) - - def test__addEvent_drop_duplicate_clear_events(t): - t.eqm.options.allowduplicateclears = False - clear_count = 1 - - queue = Mock(name="queue", spec_set=["append"]) - event = {"severity": Clear} - clear_fingerprint = t.eqm._clearFingerprint(event) - t.eqm.clear_events_count = {clear_fingerprint: 
clear_count} - - t.eqm._addEvent(queue, event) - - # non-clear events are not added to the clear_events_count dict - t.assertNotIn(t.eqm.clear_events_count, clear_fingerprint) - - queue.append.assert_not_called() - - def test__addEvent_drop_duplicate_clear_events_interval(t): - t.eqm.options.allowduplicateclears = False - clear_count = 3 - t.eqm.options.duplicateclearinterval = clear_count - - queue = Mock(name="queue", spec_set=["append"]) - event = {"severity": Clear} - clear_fingerprint = t.eqm._clearFingerprint(event) - t.eqm.clear_events_count = {clear_fingerprint: clear_count} - - t.eqm._addEvent(queue, event) - - # non-clear events are not added to the clear_events_count dict - t.assertNotIn(t.eqm.clear_events_count, clear_fingerprint) - queue.append.assert_not_called() - - def test__addEvent_counts_discarded_events(t): - queue = Mock(name="queue", spec_set=["append"]) - event = {} - discarded_event = {"name": "event"} - queue.append.return_value = discarded_event - - t.eqm._removeDiscardedEventFromClearState = create_autospec( - t.eqm._removeDiscardedEventFromClearState, - ) - t.eqm._discardedEvents.mark = create_autospec( - t.eqm._discardedEvents.mark - ) - - t.eqm._addEvent(queue, event) - - t.eqm._removeDiscardedEventFromClearState.assert_called_with( - discarded_event - ) - t.eqm._discardedEvents.mark.assert_called_with() - t.assertEqual(t.eqm.discarded_events, 1) - - def test_addEvent(t): - t.eqm._addEvent = create_autospec(t.eqm._addEvent) - event = {} - t.eqm.addEvent(event) - - t.eqm._addEvent.assert_called_with(t.eqm.event_queue, event) - - def test_addPerformanceEvent(t): - t.eqm._addEvent = create_autospec(t.eqm._addEvent) - event = {} - t.eqm.addPerformanceEvent(event) - - t.eqm._addEvent.assert_called_with(t.eqm.perf_event_queue, event) - - def test_addHeartbeatEvent(t): - heartbeat_event_queue = Mock(spec_set=t.eqm.heartbeat_event_queue) - t.eqm.heartbeat_event_queue = heartbeat_event_queue - heartbeat_event = {} - t.eqm.addHeartbeatEvent(heartbeat_event) - - heartbeat_event_queue.append.assert_called_with(heartbeat_event) - - def test_sendEvents(t): - """chunks events from EventManager's queues - yields them to the event_sender_fn - and returns a deffered with a result of events sent count - """ - t.eqm.options.eventflushchunksize = 3 - t.eqm.options.maxqueuelen = 5 - t.eqm._initQueues() - heartbeat_events = [{"heartbeat": i} for i in range(2)] - perf_events = [{"perf_event": i} for i in range(2)] - events = [{"event": i} for i in range(2)] - - t.eqm.heartbeat_event_queue.extendleft(heartbeat_events) - # heartbeat_event_queue set to static maxlen=1 - t.assertEqual(len(t.eqm.heartbeat_event_queue), 1) - t.eqm.perf_event_queue.extendleft(perf_events) - t.eqm.event_queue.extendleft(events) - - event_sender_fn = Mock(name="event_sender_fn") - - ret = t.eqm.sendEvents(event_sender_fn) - - # Priority: heartbeat, perf, event - event_sender_fn.assert_has_calls( - [ - call([heartbeat_events[1], perf_events[0], perf_events[1]]), - call([events[0], events[1]]), - ] - ) - t.assertIsInstance(ret, defer.Deferred) - t.assertEqual(ret.result, 5) - - def test_sendEvents_exception_handling(t): - """In case of exception, places events back in the queue, - and remove clear state for any discarded events - """ - t.eqm.options.eventflushchunksize = 3 - t.eqm.options.maxqueuelen = 5 - t.eqm._initQueues() - heartbeat_events = [{"heartbeat": i} for i in range(2)] - perf_events = [{"perf_event": i} for i in range(2)] - events = [{"event": i} for i in range(2)] - - 
t.eqm.heartbeat_event_queue.extendleft(heartbeat_events) - t.eqm.perf_event_queue.extendleft(perf_events) - t.eqm.event_queue.extendleft(events) - - def event_sender_fn(args): - raise Exception("event_sender_fn failed") - - ret = t.eqm.sendEvents(event_sender_fn) - # validate Exception was raised - t.assertEqual(ret.result.check(Exception), Exception) - # quash the unhandled error in defferd exception - ret.addErrback(Mock()) - - # Heartbeat events get dropped - t.assertNotIn(heartbeat_events[1], t.eqm.heartbeat_event_queue) - # events and perf_events are returned to the queues - t.assertIn(perf_events[0], t.eqm.perf_event_queue) - t.assertIn(events[0], t.eqm.event_queue) - - def test_sendEvents_exception_removes_clear_state_for_discarded(t): - t.eqm.options.eventflushchunksize = 3 - t.eqm.options.maxqueuelen = 2 - t.eqm._initQueues() - events = [{"event": i} for i in range(2)] - - t.eqm.event_queue.extendleft(events) - - def send(args): - t.eqm.event_queue.append({"new_event": 0}) - raise Exception("event_sender_fn failed") - - event_sender_fn = Mock(name="event_sender_fn", side_effect=send) - - t.eqm._removeDiscardedEventFromClearState = create_autospec( - t.eqm._removeDiscardedEventFromClearState, - name="_removeDiscardedEventFromClearState", - ) - - ret = t.eqm.sendEvents(event_sender_fn) - # validate Exception was raised - t.assertEqual(ret.result.check(Exception), Exception) - # quash the unhandled error in differd exception - ret.addErrback(Mock()) - - event_sender_fn.assert_called_with([events[0], events[1]]) - - t.eqm._removeDiscardedEventFromClearState.assert_called_with(events[0]) - - class PBDaemonClassTest(TestCase): - """PBDaemon's __init__ modifies the class attribute heartbeatEvent - so we have to test it separately - - WARNING: this test fails when running all ZenHub tests together - Caused by lines: 605, 606 - for evt in self.startEvent, self.stopEvent, self.heartbeatEvent: - evt.update(details) - which changes the class attribute when __init__ is run the first time - """ + """ """ def test_class_attributes(t): - from Products.ZenHub.PBDaemon import PBDaemon - t.assertEqual(PBDaemon.name, "pbdaemon") t.assertEqual(PBDaemon.initialServices, ["EventService"]) - # this is the problem line, heartbeatEvent differs - # /opt/zenoss/bin/runtests \ - # --type=unit --name Products.ZenHub.tests.test_PBDaemon - # t.assertEqual(PBDaemon.heartbeatEvent, {'eventClass': '/Heartbeat'}) - # /opt/zenoss/bin/runtests --type=unit --name Products.ZenHub - # t.assertEqual( - # PBDaemon.heartbeatEvent, { - # 'device': 'localhost', - # 'eventClass': '/Heartbeat', - # 'component': 'pbdaemon' - # } - # ) - t.assertEqual(PBDaemon.heartbeatTimeout, 60 * 3) t.assertEqual(PBDaemon._customexitcode, 0) - t.assertEqual(PBDaemon._pushEventsDeferred, None) - t.assertEqual(PBDaemon._eventHighWaterMark, None) - t.assertEqual(PBDaemon._healthMonitorInterval, 30) - - -class PBDaemonTest(TestCase): - def setUp(t): - # Patch external dependencies - # current version touches the reactor directly - patches = ["publisher", "reactor"] - for target in patches: - patcher = patch("{src}.{}".format(target, **PATH), autospec=True) - setattr(t, target, patcher.start()) - t.addCleanup(patcher.stop) - - # Required commandline options - sys.argv = [ - "Start", - ] - - t.name = "pb_daemon_name" - t.pbd = PBDaemon(name=t.name) - - # Mock out 'log' to prevent spurious output to stdout. 
- t.pbd.log = Mock(spec=logging.getLoggerClass()) - - t.pbd.eventQueueManager = Mock( - EventQueueManager, name="eventQueueManager" - ) +class PBDaemonInitTest(TestCase): @patch("{src}.sys".format(**PATH), autospec=True) @patch("{src}.task.LoopingCall".format(**PATH), autospec=True) @patch("{src}.stopEvent".format(**PATH), name="stopEvent", autospec=True) @@ -891,8 +33,16 @@ def setUp(t): @patch("{src}.DaemonStats".format(**PATH), autospec=True) @patch("{src}.EventQueueManager".format(**PATH), autospec=True) @patch("{src}.ZenDaemon.__init__".format(**PATH), autospec=True) + @patch("{src}._getLocalServer".format(**PATH), autospec=True) + @patch("{src}._getZenHubClient".format(**PATH), autospec=True) + @patch("{src}.Thresholds".format(**PATH), autospec=True) + @patch("{src}.ThresholdNotifier".format(**PATH), autospec=True) def test___init__( t, + ThresholdNotifier, + Thresholds, + _getZenHubClient, + _getLocalServer, ZenDaemon_init, EventQueueManager, DaemonStats, @@ -903,6 +53,7 @@ def test___init__( ): noopts = (0,) keeproot = False + name = "pb_init" # Mock out attributes set by the parent class # Because these changes are made on the class, they must be reversable @@ -910,68 +61,47 @@ def test___init__( patch.object(PBDaemon, "options", create=True), patch.object(PBDaemon, "log", create=True), ] - for patcher in t.pbdaemon_patchers: patcher.start() t.addCleanup(patcher.stop) - pbd = PBDaemon(noopts=noopts, keeproot=keeproot, name=t.name) + pbd = PBDaemon(noopts=noopts, keeproot=keeproot, name=name) # runs parent class init # this should really be using super( ZenDaemon_init.assert_called_with(pbd, noopts, keeproot) - t.assertEqual(pbd.name, t.name) - t.assertEqual(pbd.mname, t.name) + t.assertEqual(pbd.name, name) + t.assertEqual(pbd.mname, name) + + zhc = _getZenHubClient.return_value + zhc.notify_on_connect.assert_has_calls( + [call(pbd._load_initial_services), call(ANY)] + ) + + ls = _getLocalServer.return_value + ls.add_resource.assert_called_once_with("zenhub", ANY) - EventQueueManager.assert_called_with(PBDaemon.options, PBDaemon.log) + EventQueueManager.assert_not_called() # Check lots of attributes, should verify that they are needed - t.assertEqual(pbd._thresholds, None) - t.assertEqual(pbd._threshold_notifier, None) + t.assertEqual(pbd._thresholds, Thresholds.return_value) + t.assertEqual(pbd._threshold_notifier, ThresholdNotifier.return_value) t.assertEqual(pbd.rrdStats, DaemonStats.return_value) t.assertEqual(pbd.lastStats, 0) - t.assertEqual(pbd.perspective, None) - t.assertEqual(pbd.services, {}) - t.assertEqual(pbd.eventQueueManager, EventQueueManager.return_value) + t.assertEqual(pbd.services, _getZenHubClient.return_value.services) t.assertEqual(pbd.startEvent, startEvent.copy()) t.assertEqual(pbd.stopEvent, stopEvent.copy()) # appends name and device to start, stop, and heartbeat events - details = {"component": t.name, "device": PBDaemon.options.monitor} + details = {"component": name, "device": PBDaemon.options.monitor} pbd.startEvent.update.assert_called_with(details) pbd.stopEvent.update.assert_called_with(details) - t.assertEqual( - pbd.heartbeatEvent, - { - "device": PBDaemon.options.monitor, - "eventClass": "/Heartbeat", - "component": "pb_daemon_name", - }, - ) # more attributes - t.assertIsInstance(pbd.initialConnect, defer.Deferred) - t.assertEqual(pbd.stopped, False) t.assertIsInstance(pbd.counters, collections.Counter) - t.assertEqual(pbd._pingedZenhub, None) - t.assertEqual(pbd._connectionTimeout, None) - t.assertEqual(pbd._publisher, None) # should be a 
property - t.assertEqual(pbd._internal_publisher, None) - t.assertEqual(pbd._metric_writer, None) - t.assertEqual(pbd._derivative_tracker, None) t.assertEqual(pbd._metrologyReporter, None) - # Add a shutdown trigger to send a stop event and flush the event queue - t.reactor.addSystemEventTrigger.assert_called_with( - "before", "shutdown", pbd._stopPbDaemon - ) - - # Set up a looping call to support the health check. - t.assertEqual(pbd.healthMonitor, LoopingCall.return_value) - LoopingCall.assert_called_with(pbd._checkZenHub) - pbd.healthMonitor.start.assert_called_with(pbd._healthMonitorInterval) - @patch("{src}.ZenDaemon.__init__".format(**PATH), side_effect=IOError) def test__init__exit_on_ZenDaemon_IOError(t, ZenDaemon): # Mock out attributes set by the parent class @@ -980,35 +110,133 @@ def test__init__exit_on_ZenDaemon_IOError(t, ZenDaemon): log_patcher.start() t.addCleanup(log_patcher.stop) - with t.assertRaises(SystemExit): + with t.assertRaises(IOError): PBDaemon() - # this should be a property + def test_buildOptions(t): + """After initialization, the PBDaemon instance should have + options parsed from its buildOptions method + assertions based on default options + + Patch PBDaemon's __init__, because CmdBase will override config + settings with values from the global.conf file + """ + init_patcher = patch.object( + PBDaemon, "__init__", autospec=True, return_value=None + ) + init_patcher.start() + t.addCleanup(init_patcher.stop) + + pbd = PBDaemon() + pbd.parser = None + pbd.usage = "%prog [options]" + pbd.noopts = True + pbd.inputArgs = None + + # Given no commandline options + sys.argv = [] + pbd.buildOptions() + pbd.parseOptions() + + from Products.ZenHub.PBDaemon import ( + DEFAULT_HUB_HOST, + DEFAULT_HUB_PORT, + DEFAULT_HUB_USERNAME, + DEFAULT_HUB_PASSWORD, + DEFAULT_HUB_MONITOR, + ) + + t.assertEqual(pbd.options.hubhost, DEFAULT_HUB_HOST) # No default + t.assertEqual(pbd.options.hubport, DEFAULT_HUB_PORT) + t.assertEqual(pbd.options.hubusername, DEFAULT_HUB_USERNAME) + t.assertEqual(pbd.options.hubpassword, DEFAULT_HUB_PASSWORD) + t.assertEqual(pbd.options.monitor, DEFAULT_HUB_MONITOR) + t.assertEqual(pbd.options.hubtimeout, 30) + t.assertEqual(pbd.options.allowduplicateclears, False) + t.assertEqual(pbd.options.duplicateclearinterval, 0) + t.assertEqual(pbd.options.eventflushseconds, 5) + t.assertEqual(pbd.options.eventflushseconds, 5.0) + t.assertEqual(pbd.options.eventflushchunksize, 50) + t.assertEqual(pbd.options.maxqueuelen, 5000) + t.assertEqual(pbd.options.queueHighWaterMark, 0.75) + t.assertEqual(pbd.options.zhPingInterval, 120) + t.assertEqual(pbd.options.deduplicate_events, True) + t.assertEqual( + pbd.options.redisUrl, + "redis://localhost:{default}/0".format( + default=publisher.defaultRedisPort + ), + ) + t.assertEqual( + pbd.options.metricBufferSize, publisher.defaultMetricBufferSize + ) + t.assertEqual( + pbd.options.metricsChannel, publisher.defaultMetricsChannel + ) + t.assertEqual( + pbd.options.maxOutstandingMetrics, + publisher.defaultMaxOutstandingMetrics, + ) + t.assertEqual(pbd.options.pingPerspective, True) + t.assertEqual(pbd.options.writeStatistics, 30) + + +class PBDaemonTest(TestCase): + def setUp(t): + # Patch external dependencies + # current version touches the reactor directly + patches = [ + "_getZenHubClient", + "EventClient", + "EventQueueManager", + "LocalServer", + "MetricWriter", + "publisher", + "reactor", + ] + + for target in patches: + patcher = patch("{src}.{}".format(target, **PATH), spec=True) + setattr(t, target, 
patcher.start()) + t.addCleanup(patcher.stop) + + t.EventClient.counters = Mock(collections.Counter(), autospec=True) + + # Required commandline options + sys.argv = [ + "Start", + ] + + t.name = "pb_daemon_name" + t.pbd = PBDaemon(name=t.name) + t.pbd.fqdn = "fqdn" + + # Mock out 'log' to prevent spurious output to stdout. + t.pbd.log = Mock(spec=logging.getLoggerClass()) + def test_publisher(t): - pbd = PBDaemon(name=t.name) host = "localhost" port = 9999 - pbd.options.redisUrl = "http://{}:{}".format(host, port) + t.pbd.options.redisUrl = "http://{}:{}".format(host, port) - ret = pbd.publisher() + ret = t.pbd.publisher() t.assertEqual(ret, t.publisher.RedisListPublisher.return_value) t.publisher.RedisListPublisher.assert_called_with( host, port, - pbd.options.metricBufferSize, - channel=pbd.options.metricsChannel, - maxOutstandingMetrics=pbd.options.maxOutstandingMetrics, + t.pbd.options.metricBufferSize, + channel=t.pbd.options.metricsChannel, + maxOutstandingMetrics=t.pbd.options.maxOutstandingMetrics, ) @patch("{src}.os".format(**PATH), autospec=True) - def test_internalPublisher(t, os): + def test_internalPublisher(t, _os): # All the methods with this pattern need to be converted to properties - t.assertEqual(t.pbd._internal_publisher, None) url = Mock(name="url", spec_set=[]) username = "username" - password = "password" - os.environ = { + password = "password" # noqa S105 + _os.environ = { "CONTROLPLANE_CONSUMER_URL": url, "CONTROLPLANE_CONSUMER_USERNAME": username, "CONTROLPLANE_CONSUMER_PASSWORD": password, @@ -1022,144 +250,52 @@ def test_internalPublisher(t, os): password, url, ) - t.assertEqual(t.pbd._internal_publisher, ret) + t.assertEqual(t.pbd.internalPublisher(), ret) @patch("{src}.os".format(**PATH), autospec=True) - @patch("{src}.MetricWriter".format(**PATH), autospec=True) - def test_metricWriter_legacy(t, MetricWriter, os): - t.assertEqual(t.pbd._metric_writer, None) - + def test_metricWriter_legacy(t, _os): t.pbd.publisher = create_autospec(t.pbd.publisher) t.pbd.internalPublisher = create_autospec(t.pbd.internalPublisher) - os.environ = {"CONTROLPLANE": "0"} + _os.environ = {"CONTROLPLANE": "0"} ret = t.pbd.metricWriter() - MetricWriter.assert_called_with(t.pbd.publisher()) - t.assertEqual(ret, MetricWriter.return_value) - t.assertEqual(t.pbd._metric_writer, ret) + t.MetricWriter.assert_called_with(t.pbd.publisher()) + t.assertEqual(ret, t.MetricWriter.return_value) + t.assertEqual(t.pbd.metricWriter(), ret) @patch("{src}.AggregateMetricWriter".format(**PATH), autospec=True) @patch("{src}.FilteredMetricWriter".format(**PATH), autospec=True) @patch("{src}.os".format(**PATH), autospec=True) - @patch("{src}.MetricWriter".format(**PATH), autospec=True) def test_metricWriter_controlplane( - t, MetricWriter, os, FilteredMetricWriter, AggregateMetricWriter + t, _os, _FilteredMetricWriter, _AggregateMetricWriter ): - t.assertEqual(t.pbd._metric_writer, None) - t.pbd.publisher = create_autospec(t.pbd.publisher, name="publisher") t.pbd.internalPublisher = create_autospec( t.pbd.internalPublisher, name="internalPublisher" ) - os.environ = {"CONTROLPLANE": "1"} + _os.environ = {"CONTROLPLANE": "1"} ret = t.pbd.metricWriter() - MetricWriter.assert_called_with(t.pbd.publisher()) - AggregateMetricWriter.assert_called_with( - [MetricWriter.return_value, FilteredMetricWriter.return_value] + t.MetricWriter.assert_called_with(t.pbd.publisher()) + _AggregateMetricWriter.assert_called_with( + [t.MetricWriter.return_value, _FilteredMetricWriter.return_value] ) - t.assertEqual(ret, 
AggregateMetricWriter.return_value) - t.assertEqual(t.pbd._metric_writer, ret) + t.assertEqual(ret, _AggregateMetricWriter.return_value) + t.assertEqual(t.pbd.metricWriter(), ret) @patch("{src}.DerivativeTracker".format(**PATH), autospec=True) - def test_derivativeTracker(t, DerivativeTracker): - t.assertEqual(t.pbd._derivative_tracker, None) - + def test_derivativeTracker(t, _DerivativeTracker): ret = t.pbd.derivativeTracker() - t.assertEqual(ret, DerivativeTracker.return_value) - t.assertEqual(t.pbd._derivative_tracker, ret) - - def test_connecting(t): - # logs a message, noop - t.pbd.connecting() - - def test_getZenhubInstanceId(t): - # returns a deferred, should be replaced with inlineCallbacks - perspective = Mock(name="perspective", spec_set=["callRemote"]) - t.pbd.perspective = perspective - - ret = t.pbd.getZenhubInstanceId() - - t.assertEqual(ret, perspective.callRemote.return_value) - perspective.callRemote.assert_called_with("getHubInstanceId") - - def test_gotPerspective(t): - perspective = Mock(name="perspective", spec_set=["callRemote"]) - _connectionTimeout = Mock( - spec_set=t.pbd._connectionTimeout, name="_connectionTimeout" - ) - t.pbd._connectionTimeout = _connectionTimeout - getInitialServices = Mock(name="getInitialServices", spec_set=[]) - t.pbd.getInitialServices = getInitialServices - initialConnect = Mock(name="initialConnect", spec_set=[]) - t.pbd.initialConnect = initialConnect - - t.pbd.gotPerspective(perspective) - - # sets the perspective attribute - t.assertEqual(t.pbd.perspective, perspective) - # if _connectionTimeoutcall is set call _connectionTimeout.cancel() - _connectionTimeout.cancel.assert_called_with() - t.assertEqual(t.pbd._connectionTimeout, None) - # if initialConnect is set, it is set to None, - # and executed after getInitialServices as a deferred - getInitialServices.assert_called_with() - d2 = getInitialServices.return_value - t.assertEqual(t.pbd.initialConnect, None) - d2.chainDeferred.assert_called_with(initialConnect) - - @patch( - "{src}.credentials".format(**PATH), name="credentials", autospec=True - ) - @patch( - "{src}.ReconnectingPBClientFactory".format(**PATH), - name="ReconnectingPBClientFactory", - autospec=True, - ) - def test_connect(t, ReconnectingPBClientFactory, credentials): - factory = ReconnectingPBClientFactory.return_value - options = t.pbd.options - connectTimeout = Mock(t.pbd.connectTimeout, name="connectTimeout") - t.pbd.connectTimeout = connectTimeout + t.assertEqual(ret, _DerivativeTracker.return_value) + def test_connect(t): + zhc = t._getZenHubClient.return_value + expected = zhc.start.return_value ret = t.pbd.connect() - - # ensure the connection factory is setup properly - factory.connectTCP.assert_called_with(options.hubhost, options.hubport) - t.assertEqual(factory.gotPerspective, t.pbd.gotPerspective) - t.assertEqual(factory.connecting, t.pbd.connecting) - credentials.UsernamePassword.assert_called_with( - options.hubusername, options.hubpassword - ) - factory.setCredentials.assert_called_with( - credentials.UsernamePassword.return_value - ) - - # connectionTimeout is set - t.assertEqual( - t.pbd._connectionTimeout, t.reactor.callLater.return_value - ) - - # returns pbd.initialconnect, not sure where the factory goes - t.assertEqual(ret, t.pbd.initialConnect) - - # test timeout method passed to reactor.callLater - # unpack the args to get the timeout function - args, kwargs = t.reactor.callLater.call_args - timeout = args[1] - d = Mock(defer.Deferred, name="initialConnect.result") - d.called = False - 
timeout(d) - connectTimeout.assert_called_with() - - def test_connectTimeout(t): - """logs a message and passes, - not to be confused with _connectionTimeout, which is set to a deferred - """ - t.pbd.connectTimeout() + t.assertEqual(ret, expected) def test_eventService(t): # alias for getServiceNow @@ -1169,108 +305,72 @@ def test_eventService(t): def test_getServiceNow(t): svc_name = "svc_name" - t.pbd.services[svc_name] = "some service" + zhc = t._getZenHubClient.return_value + zhc.services = {svc_name: "some service"} ret = t.pbd.getServiceNow(svc_name) t.assertEqual(ret, t.pbd.services[svc_name]) @patch("{src}.FakeRemote".format(**PATH), autospec=True) def test_getServiceNow_FakeRemote_on_missing_service(t, FakeRemote): - ret = t.pbd.getServiceNow("svc_name") + svc_name = "svc_name" + zhc = t._getZenHubClient.return_value + zhc.services = {} + + ret = t.pbd.getServiceNow(svc_name) t.assertEqual(ret, FakeRemote.return_value) def test_getService_known_service(t): + zhc = t._getZenHubClient.return_value t.pbd.services["known_service"] = "service" ret = t.pbd.getService("known_service") t.assertIsInstance(ret, defer.Deferred) - t.assertEqual(ret.result, t.pbd.services["known_service"]) + t.assertEqual(ret.result, zhc.get_service.return_value) def test_getService(t): """this is going to be ugly to test, and badly needs to be rewritten as an inlineCallback """ - perspective = Mock(name="perspective", spec_set=["callRemote"]) - serviceListeningInterface = Mock( - name="serviceListeningInterface", spec_set=[] - ) - t.pbd.perspective = perspective + zhc = t._getZenHubClient.return_value + serviceListeningInterface = object() service_name = "service_name" - ret = t.pbd.getService(service_name, serviceListeningInterface) + actual = t.pbd.getService(service_name, serviceListeningInterface) - perspective.callRemote.assert_called_with( - "getService", - service_name, - t.pbd.options.monitor, - serviceListeningInterface, - t.pbd.options.__dict__, + t.assertIsInstance(actual, defer.Deferred) + t.assertEqual(actual.result, zhc.get_service.return_value) + zhc.get_service.assert_has_calls( + [ + call( + service_name, + t.pbd.options.monitor, + serviceListeningInterface, + t.pbd.options.__dict__, + ) + ] ) - t.assertEqual(ret, perspective.callRemote.return_value) - - # Pull the callbacks out of ret, to make sure they work as intended - args, kwargs = ret.addCallback.call_args - callback = args[0] - # callback adds the service to pbd.services - ret_callback = callback(ret.result, service_name) - t.assertEqual(t.pbd.services[service_name], ret.result) - # the service (result) has notifyOnDisconnect called with removeService - args, kwargs = ret_callback.notifyOnDisconnect.call_args - removeService = args[0] - # removeService - removeService(service_name) - t.assertNotIn(service_name, t.pbd.services) - - @patch("{src}.defer".format(**PATH), autospec=True) - def test_getInitialServices(t, defer): + + def test__load_initial_services(t): # , defer): """execute getService(svc_name) for every service in initialServices in parallel deferreds """ - getService = create_autospec(t.pbd.getService, name="getService") + getService = Mock(name="getService") t.pbd.getService = getService - ret = t.pbd.getInitialServices() - defer.DeferredList.assert_called_with( - [getService.return_value for svc in t.pbd.initialServices], - fireOnOneErrback=True, - consumeErrors=True, - ) + t.pbd._load_initial_services() + getService.assert_has_calls( - [call(svc) for svc in t.pbd.initialServices] + [call(svcname) for svcname in 
t.pbd.initialServices] ) - t.assertEqual(ret, defer.DeferredList.return_value) - def test_connected(t): # does nothing t.pbd.connected() - @patch("{src}.ThresholdNotifier".format(**PATH), autospec=True) - def test__getThresholdNotifier(t, ThresholdNotifier): - # refactor to be a property - t.assertEqual(t.pbd._threshold_notifier, None) - ret = t.pbd._getThresholdNotifier() - - ThresholdNotifier.assert_called_with( - t.pbd.sendEvent, t.pbd.getThresholds() - ) - t.assertEqual(ret, ThresholdNotifier.return_value) - t.assertEqual(t.pbd._threshold_notifier, ret) - - @patch("{src}.Thresholds".format(**PATH), autospec=True) - def test_getThresholds(t, Thresholds): - # refactor to be a property - t.assertEqual(t.pbd._thresholds, None) - - ret = t.pbd.getThresholds() - - Thresholds.assert_called_with() - t.assertEqual(ret, Thresholds.return_value) - t.assertEqual(t.pbd._thresholds, ret) - @patch("{src}.sys".format(**PATH), autospec=True) @patch("{src}.task".format(**PATH), autospec=True) @patch("{src}.TwistedMetricReporter".format(**PATH), autospec=True) - def test_run(t, TwistedMetricReporter, task, sys): + def test_run(t, _TwistedMetricReporter, _task, _sys): """Starts up all of the internal loops, does not return until reactor.run() completes (reactor is shutdown) """ @@ -1278,57 +378,29 @@ def test_run(t, TwistedMetricReporter, task, sys): t.pbd.connect = create_autospec(t.pbd.connect) t.pbd._customexitcode = 99 t.pbd.options = Mock(name="options", cycle=True) - t.pbd._metric_writer = sentinel._metric_writer + t.pbd._PBDaemon__server = Mock() + host = "localhost" + port = 9999 + t.pbd.options.redisUrl = "http://{}:{}".format(host, port) t.pbd.run() - # adds startStatsLoop to reactor.callWhenRunning - args, kwargs = t.reactor.callWhenRunning.call_args - startStatsLoop = args[0] - ret = startStatsLoop() - task.LoopingCall.assert_called_with(t.pbd.postStatistics) - loop = task.LoopingCall.return_value - loop.start.assert_called_with(t.pbd.options.writeStatistics, now=False) - daemonTags = { - "zenoss_daemon": t.pbd.name, - "zenoss_monitor": t.pbd.options.monitor, - "internal": True, - } - TwistedMetricReporter.assert_called_with( - t.pbd.options.writeStatistics, - t.pbd.metricWriter(), - daemonTags, - ) - t.assertEqual( - t.pbd._metrologyReporter, TwistedMetricReporter.return_value - ) - t.pbd._metrologyReporter.start.assert_called_with() - - # adds stopReporter (defined internally) to reactor before shutdown - args, kwargs = t.reactor.addSystemEventTrigger.call_args - stopReporter = args[2] - t.assertEqual(args[0], "before") - t.assertEqual(args[1], "shutdown") - ret = stopReporter() - t.assertEqual(ret, t.pbd._metrologyReporter.stop.return_value) - t.pbd._metrologyReporter.stop.assert_called_with() + t.pbd.connect.assert_called_with() t.pbd.rrdStats.config.assert_called_with( t.pbd.name, t.pbd.options.monitor, t.pbd.metricWriter(), - t.pbd._getThresholdNotifier(), + t.pbd._threshold_notifier, t.pbd.derivativeTracker(), ) - # returns a deferred, that has a callback added to it - # but we have no access to it from outside the function - t.pbd.connect.assert_called_with() - + t.reactor.callWhenRunning.assert_called_with(t.pbd._started) t.reactor.run.assert_called_with() + # only calls sys.exit if a custom exitcode is set, should probably # exit even if exitcode = 0 - sys.exit.assert_called_with(t.pbd._customexitcode) + _sys.exit.assert_called_with(t.pbd._customexitcode) def test_setExitCode(t): exitcode = Mock() @@ -1341,67 +413,28 @@ def test_stop(t): t.pbd.stop() 
t.reactor.stop.assert_called_with() - def test__stopPbDaemon(t): - # set stopped=True, and send a stopEvent - t.assertFalse(t.pbd.stopped) - t.pbd.services["EventService"] = True - t.pbd.options.cycle = True - t.pbd.sendEvent = Mock(t.pbd.sendEvent, name="sendEvent") - t.pbd.pushEvents = Mock(t.pbd.pushEvents, name="pushEvents") - - ret = t.pbd._stopPbDaemon() - - t.assertTrue(t.pbd.stopped) - - # send a stopEvent if it has an EventService - t.pbd.sendEvent.assert_called_with(t.pbd.stopEvent) - t.assertEqual(ret, t.pbd.pushEvents.return_value) - - def test__stopPbDaemon_pushEventsDeferred(t): - # if _pushEventsDeferred is set, append a new pushEvents deffered to it - t.pbd._pushEventsDeferred = Mock( - defer.Deferred, name="_pushEventsDeferred" - ) - t.assertFalse(t.pbd.stopped) - t.pbd.services["EventService"] = True - t.pbd.options.cycle = True - t.pbd.sendEvent = Mock(t.pbd.sendEvent, name="sendEvent") - t.pbd.pushEvents = Mock(t.pbd.pushEvents, name="pushEvents") - - ret = t.pbd._stopPbDaemon() - - t.assertTrue(t.pbd.stopped) - - # send a stopEvent if it has an EventService - t.pbd.sendEvent.assert_called_with(t.pbd.stopEvent) - t.assertEqual(ret, t.pbd._pushEventsDeferred) - # unable to test pushEvents added as callback - # blocked by maybe unneccesary lambda - def test_sendEvents(t): - # simply maps events to sendEvent - t.pbd.sendEvent = Mock(t.pbd.sendEvent, name="sendEvent") + ec = t.EventClient.return_value + t.pbd._setup_event_client() events = [{"name": "evt_a"}, {"name": "evt_b"}] - t.pbd.sendEvents(events) + d = t.pbd.sendEvents(events) - t.pbd.sendEvent.assert_has_calls([call(event) for event in events]) + t.assertEqual(d, ec.sendEvents.return_value) + ec.sendEvents.assert_called_with(events) - @patch("{src}.defer".format(**PATH), autospec=True) - def test_sendEvent(t, defer): - # appends events to the in-memory outbound queue + def test_sendEvent(t): + ec = t.EventClient.return_value + sendEvent = Mock(name="sendEvent") + ec.sendEvent = sendEvent + t.pbd._setup_event_client() event = {"name": "event"} - generated_event = t.pbd.generateEvent(event, newkey="newkey") - t.pbd.eventQueueManager.event_queue_length = 0 - t.assertEqual(t.pbd.counters["eventCount"], 0) - t.pbd._eventHighWaterMark = False - ret = t.pbd.sendEvent(event, newkey="newkey") + d = t.pbd.sendEvent(event, newkey="newkey") - t.pbd.eventQueueManager.addEvent.assert_called_with(generated_event) - t.assertEqual(t.pbd.counters["eventCount"], 1) - defer.succeed.assert_called_with(None) - t.assertEqual(ret, defer.succeed.return_value) + t.assertIsInstance(d, defer.Deferred) + t.assertIsNone(d.result) + sendEvent.assert_has_calls([call(event, newkey="newkey")]) def test_generateEvent(t): # returns a dict with keyword args, and other values added @@ -1415,84 +448,19 @@ def test_generateEvent(t): "name": "event", "newkey": "newkey", "agent": t.pbd.name, - "monitor": t.pbd.options.monitor, "manager": t.pbd.fqdn, + "monitor": t.pbd.options.monitor, }, ) - def test_generateEvent_reactor_not_running(t): - # returns nothing if reactor is not running - t.reactor.running = False - ret = t.pbd.generateEvent({"name": "event"}) - t.assertEqual(ret, None) - - def test_pushEventsLoop(t): - """currently an old-style convoluted looping deferred - this needs to be refactored to run in a task.loopingCall - """ - t.pbd.pushEvents = create_autospec(t.pbd.pushEvents, name="pushEvents") - - ret = t.pbd.pushEventsLoop() - - t.reactor.callLater.assert_called_with( - t.pbd.options.eventflushseconds, t.pbd.pushEventsLoop - ) - 
t.pbd.pushEvents.assert_called_with() - - t.assertEqual(ret.result, None) - - @patch("{src}.partial".format(**PATH), autospec=True) - @patch("{src}.defer".format(**PATH), autospec=True) - def test_pushEvents(t, defer, partial): - """Does excessive pre-checking, and book keeping before sending - sending the Event Service remote procedure 'sendEvents' - to the eventQueueManager.sendEvents function - - All of this event management work needs to be refactored into its own - EventManager Class - """ - t.pbd.eventQueueManager.discarded_events = None - t.reactor.running = True - t.assertEqual(t.pbd._eventHighWaterMark, None) - t.assertEqual(t.pbd._pushEventsDeferred, None) - evtSvc = Mock(name="event_service", spec_set=["callRemote"]) - t.pbd.services["EventService"] = evtSvc - - t.pbd.pushEvents() - - partial.assert_called_with(evtSvc.callRemote, "sendEvents") - send_events_fn = partial.return_value - t.pbd.eventQueueManager.sendEvents.assert_called_with(send_events_fn) - - def test_pushEvents_reactor_not_running(t): - # do nothing if the reactor is not running - t.reactor.running = False - t.pbd.log = Mock(t.pbd.log, name="log") - t.pbd.pushEvents() - # really ugly way of checking we entered this block - t.pbd.log.debug.assert_called_with( - "Skipping event sending - reactor not running." - ) - - def test_heartbeat(t): - t.pbd.options.cycle = True - t.pbd.niceDoggie = create_autospec(t.pbd.niceDoggie, name="niceDoggie") - - t.pbd.heartbeat() - - heartbeat_event = t.pbd.generateEvent( - t.pbd.heartbeatEvent, timeout=t.pbd.heartbeatTimeout - ) - t.pbd.eventQueueManager.addHeartbeatEvent.assert_called_with( - heartbeat_event - ) - t.pbd.niceDoggie.assert_called_with(t.pbd.heartbeatTimeout / 3) - def test_postStatisticsImpl(t): # does nothing, maybe implemented by subclasses t.pbd.postStatisticsImpl() def test_postStatistics(t): + ec = t.EventClient.return_value + ec.counters = collections.Counter() + t.pbd._setup_event_client() # sets rrdStats, then calls postStatisticsImpl t.pbd.rrdStats = Mock(name="rrdStats", spec_set=["counter"]) ctrs = {"c1": 3, "c2": 5} @@ -1506,14 +474,14 @@ def test_postStatistics(t): ) @patch("{src}.os".format(**PATH)) - def test__pickleName(t, os): + def test__pickleName(t, _os): # refactor as a property ret = t.pbd._pickleName() - os.environ.get.assert_called_with("CONTROLPLANE_INSTANCE_ID") + _os.environ.get.assert_called_with("CONTROLPLANE_INSTANCE_ID") t.assertEqual( ret, "var/{}_{}_counters.pickle".format( - t.pbd.name, os.environ.get.return_value + t.pbd.name, _os.environ.get.return_value ), ) @@ -1533,149 +501,3 @@ def test_remote_shutdown(t): def test_remote_setPropertyItems(t): # does nothing t.pbd.remote_setPropertyItems("items arg is ignored") - - def test_remote_updateThresholdClasses(t): - """attempts to call importClass for all class names in classes arg - currently imports the importClasses within the method its self, - making patching and testing impossible - - used exclusively by Products.DataCollector.zenmodeler.ZenModeler - """ - pass - # ret = t.pbd.remote_updateThresholdClasses(['class_a', 'class_b']) - # t.assertEqual(ret, 'something') - - def test__checkZenHub(t): - t.pbd._signalZenHubAnswering = create_autospec( - t.pbd._signalZenHubAnswering, name="_signalZenHubAnswering" - ) - perspective = Mock(name="perspective", spec_set=["callRemote"]) - t.pbd.perspective = perspective - - ret = t.pbd._checkZenHub() - - perspective.callRemote.assert_called_with("ping") - t.assertEqual(ret, t.pbd.perspective.callRemote.return_value) - # Get the internally 
defined callback to test it - args, kwargs = ret.addCallback.call_args - callback = args[0] - # if perspective.callRemote('ping') returns 'pong' - callback(result="pong") - t.pbd._signalZenHubAnswering.assert_called_with(True) - # any other result calls _signalZenHubAnswering(False) - callback(result=None) - t.pbd._signalZenHubAnswering.assert_called_with(False) - - def test__checkZenHub_without_perspective(t): - t.pbd.perspective = False - t.pbd._signalZenHubAnswering = create_autospec( - t.pbd._signalZenHubAnswering, name="_signalZenHubAnswering" - ) - - t.pbd._checkZenHub() - - t.pbd._signalZenHubAnswering.assert_called_with(False) - - def test__checkZenHub_exception(t): - perspective = Mock(name="perspective", spec_set=["callRemote"]) - perspective.callRemote.side_effect = Exception - t.pbd.perspective = perspective - - t.pbd._signalZenHubAnswering = create_autospec( - t.pbd._signalZenHubAnswering, name="_signalZenHubAnswering" - ) - - t.pbd._checkZenHub() - - t.pbd._signalZenHubAnswering.assert_called_with(False) - - @patch("{src}.zenPath".format(**PATH), name="zenPath", autospec=True) - @patch( - "{src}.atomicWrite".format(**PATH), name="atomicWrite", autospec=True - ) - def test__signalZenHubAnswering_True(t, atomicWrite, zenPath): - """creates an empty file named zenhub_connected, if zenhub is answering - removes it if zenhub is not answering - """ - filename = "zenhub_connected" - t.pbd._signalZenHubAnswering(True) - zenPath.assert_called_with("var", filename) - atomicWrite(filename, "") - - @patch("{src}.os".format(**PATH), name="os", autospec=True) - @patch("{src}.zenPath".format(**PATH), name="zenPath", autospec=True) - def test__signalZenHubAnswering_False(t, zenPath, os): - """creates an empty file named zenhub_connected, if zenhub is answering - removes it if zenhub is not answering - """ - filename = "zenhub_connected" - t.pbd._signalZenHubAnswering(False) - zenPath.assert_called_with("var", filename) - os.remove.assert_called_with(zenPath.return_value) - - def test_buildOptions(t): - """After initialization, the InvalidationWorker instance should have - options parsed from its buildOptions method - assertions based on default options - - Patch ZenDaemon's init, because CmdBase will override config - settings with values from the global.conf file - """ - t.init_patcher = patch.object( - PBDaemon, "__init__", autospec=True, return_value=None - ) - t.init_patcher.start() - t.addCleanup(t.init_patcher.stop) - - t.pbd = PBDaemon() - t.pbd.parser = None - t.pbd.usage = "%prog [options]" - t.pbd.noopts = True - t.pbd.inputArgs = None - - # Given no commandline options - sys.argv = [] - t.pbd.buildOptions() - t.pbd.parseOptions() - - from Products.ZenHub.PBDaemon import ( - DEFAULT_HUB_HOST, - DEFAULT_HUB_PORT, - DEFAULT_HUB_USERNAME, - DEFAULT_HUB_PASSWORD, - DEFAULT_HUB_MONITOR, - ) - - t.assertEqual(t.pbd.options.hubhost, DEFAULT_HUB_HOST) # No default - t.assertEqual(t.pbd.options.hubport, DEFAULT_HUB_PORT) - t.assertEqual(t.pbd.options.hubusername, DEFAULT_HUB_USERNAME) - t.assertEqual(t.pbd.options.hubpassword, DEFAULT_HUB_PASSWORD) - t.assertEqual(t.pbd.options.monitor, DEFAULT_HUB_MONITOR) - t.assertEqual(t.pbd.options.hubtimeout, 30) - t.assertEqual(t.pbd.options.allowduplicateclears, False) - t.assertEqual(t.pbd.options.duplicateclearinterval, 0) - t.assertEqual(t.pbd.options.eventflushseconds, 5) - t.assertEqual(t.pbd.options.eventflushseconds, 5.0) - t.assertEqual(t.pbd.options.eventflushchunksize, 50) - t.assertEqual(t.pbd.options.maxqueuelen, 5000) - 
t.assertEqual(t.pbd.options.queueHighWaterMark, 0.75) - t.assertEqual(t.pbd.options.zhPingInterval, 120) - t.assertEqual(t.pbd.options.deduplicate_events, True) - t.assertEqual( - t.pbd.options.redisUrl, - "redis://localhost:{default}/0".format( - default=t.publisher.defaultRedisPort - ), - ) - t.assertEqual( - t.pbd.options.metricBufferSize, t.publisher.defaultMetricBufferSize - ) - t.assertEqual( - t.pbd.options.metricsChannel, t.publisher.defaultMetricsChannel - ) - t.assertEqual( - t.pbd.options.maxOutstandingMetrics, - t.publisher.defaultMaxOutstandingMetrics, - ) - t.assertEqual(t.pbd.options.pingPerspective, True) - t.assertEqual(t.pbd.options.writeStatistics, 30) diff --git a/Products/ZenHub/tests/test_errors.py b/Products/ZenHub/tests/test_errors.py new file mode 100644 index 0000000000..51bb3b5c18 --- /dev/null +++ b/Products/ZenHub/tests/test_errors.py @@ -0,0 +1,57 @@ +from mock import Mock +from unittest import TestCase + +from ..errors import ( + ConflictError, + pb, + RemoteException, + RemoteBadMonitor, + RemoteConflictError, + translateError, +) + + +class RemoteExceptionsTest(TestCase): + """These exceptions can probably be moved into their own module""" + + def test_raise_RemoteException(t): + with t.assertRaises(RemoteException): + raise RemoteException("message", "traceback") + + def test_RemoteException_is_pb_is_copyable(t): + t.assertTrue(issubclass(RemoteException, pb.Copyable)) + t.assertTrue(issubclass(RemoteException, pb.RemoteCopy)) + + def test_raise_RemoteConflictError(t): + with t.assertRaises(RemoteConflictError): + raise RemoteConflictError("message", "traceback") + + def test_RemoteConflictError_is_pb_is_copyable(t): + t.assertTrue(issubclass(RemoteConflictError, pb.Copyable)) + t.assertTrue(issubclass(RemoteConflictError, pb.RemoteCopy)) + + def test_raise_RemoteBadMonitor(t): + with t.assertRaises(RemoteBadMonitor): + raise RemoteBadMonitor("message", "traceback") + + def test_RemoteBadMonitor_is_pb_is_copyable(t): + t.assertTrue(issubclass(RemoteBadMonitor, pb.Copyable)) + t.assertTrue(issubclass(RemoteBadMonitor, pb.RemoteCopy)) + + def test_translateError_transforms_ConflictError(t): + traceback = Mock(spec_set=["_p_oid"]) + + @translateError + def raise_conflict_error(): + raise ConflictError("message", traceback) + + with t.assertRaises(RemoteConflictError): + raise_conflict_error() + + def test_translateError_transforms_Exception(t): + @translateError + def raise_error(): + raise Exception("message", "traceback") + + with t.assertRaises(RemoteException): + raise_error() diff --git a/Products/ZenHub/tests/test_invalidationfilter.py b/Products/ZenHub/tests/test_invalidationfilter.py index 767f002d9b..952f4f9da6 100644 --- a/Products/ZenHub/tests/test_invalidationfilter.py +++ b/Products/ZenHub/tests/test_invalidationfilter.py @@ -1,9 +1,11 @@ -from unittest import TestCase from mock import Mock, patch, create_autospec - +from Products.ZCatalog.interfaces import ICatalogBrain +from unittest import TestCase from zope.interface.verify import verifyObject -from Products.ZenHub.invalidationfilter import ( +from ..invalidationfilter import ( + _getZorCProperties, + _iszorcustprop, BaseOrganizerFilter, DeviceClass, DeviceClassInvalidationFilter, @@ -17,48 +19,46 @@ OSProcessOrganizer, OSProcessOrganizerFilter, ) - -from Products.ZCatalog.interfaces import ICatalogBrain -from mock_interface import create_interface_mock +from .mock_interface import create_interface_mock PATH = {"invalidationfilter": "Products.ZenHub.invalidationfilter"} class 
IgnorableClassesFilterTest(TestCase): - def setUp(self): - self.icf = IgnorableClassesFilter() + def setUp(t): + t.icf = IgnorableClassesFilter() - def test_init(self): - IInvalidationFilter.providedBy(self.icf) + def test_init(t): + IInvalidationFilter.providedBy(t.icf) # current version fails because weight attribute is not defined # icf.weight = 1 # verifyObject(IInvalidationFilter, icf) - self.assertTrue(hasattr(self.icf, "CLASSES_TO_IGNORE")) + t.assertTrue(hasattr(t.icf, "CLASSES_TO_IGNORE")) - def test_initialize(self): + def test_initialize(t): context = Mock(name="context") - self.icf.initialize(context) + t.icf.initialize(context) # No return or side-effects - def test_include(self): + def test_include(t): obj = Mock(name="object") - out = self.icf.include(obj) - self.assertEqual(out, FILTER_CONTINUE) + out = t.icf.include(obj) + t.assertEqual(out, FILTER_CONTINUE) - def test_include_excludes_classes_to_ignore(self): - self.icf.CLASSES_TO_IGNORE = str - out = self.icf.include("ignore me!") - self.assertEqual(out, FILTER_EXCLUDE) + def test_include_excludes_classes_to_ignore(t): + t.icf.CLASSES_TO_IGNORE = str + out = t.icf.include("ignore me!") + t.assertEqual(out, FILTER_EXCLUDE) class BaseOrganizerFilterTest(TestCase): - def setUp(self): - self.types = Mock(name="types") - self.bof = BaseOrganizerFilter(self.types) + def setUp(t): + t.types = Mock(name="types") + t.bof = BaseOrganizerFilter(t.types) # @patch with autospec fails (https://bugs.python.org/issue23078) # manually spec ZenPropertyManager - self.organizer = Mock( + t.organizer = Mock( name="Products.ZenRelations.ZenPropertyManager", spec_set=[ "zenPropertyIds", @@ -68,148 +68,157 @@ def setUp(self): ], ) - def test_init(self): - IInvalidationFilter.providedBy(self.bof) - verifyObject(IInvalidationFilter, self.bof) - self.assertEqual(self.bof.weight, 10) - self.assertEqual(self.bof._types, self.types) - - def test_iszorcustprop(self): - match = self.bof.iszorcustprop("no match") - self.assertEqual(match, None) - match = self.bof.iszorcustprop("cProperty") - self.assertTrue(match) - match = self.bof.iszorcustprop("zProperty") - self.assertTrue(match) - - def test_getRoot(self): + def test_init(t): + IInvalidationFilter.providedBy(t.bof) + verifyObject(IInvalidationFilter, t.bof) + t.assertEqual(t.bof.weight, 10) + t.assertEqual(t.bof._types, t.types) + + def test_iszorcustprop(t): + result = _iszorcustprop("no match") + t.assertEqual(result, None) + result = _iszorcustprop("cProperty") + t.assertTrue(result) + result = _iszorcustprop("zProperty") + t.assertTrue(result) + + def test_getRoot(t): context = Mock(name="context") - root = self.bof.getRoot(context) - self.assertEqual(root, context.dmd.primaryAq()) + root = t.bof.getRoot(context) + t.assertEqual(root, context.dmd.primaryAq()) @patch( "{invalidationfilter}.IModelCatalogTool".format(**PATH), autospec=True, spec_set=True, ) - def test_initialize(self, IModelCatalogTool): + def test_initialize(t, IModelCatalogTool): # Create a Mock object that provides the ICatalogBrain interface ICatalogBrainMock = create_interface_mock(ICatalogBrain) brain = ICatalogBrainMock() IModelCatalogTool.return_value.search.return_value = [brain] - checksum = create_autospec(self.bof.organizerChecksum) - self.bof.organizerChecksum = checksum + checksum = create_autospec(t.bof.organizerChecksum) + t.bof.organizerChecksum = checksum context = Mock(name="context") - self.bof.initialize(context) + t.bof.initialize(context) - self.assertEqual( - self.bof.checksum_map, + t.assertEqual( + 
t.bof.checksum_map, {brain.getPath.return_value: checksum.return_value}, ) - def test_getZorCProperties(self): + def test_getZorCProperties(t): zprop = Mock(name="zenPropertyId", spec_set=[]) - self.organizer.zenPropertyIds.return_value = [zprop, zprop] + t.organizer.zenPropertyIds.return_value = [zprop, zprop] # getZorCProperties returns a generator - results = self.bof.getZorCProperties(self.organizer) + results = _getZorCProperties(t.organizer) - self.organizer.zenPropIsPassword.return_value = False + t.organizer.zenPropIsPassword.return_value = False zId, propertyString = next(results) - self.assertEqual(zId, zprop) - self.assertEqual( - propertyString, self.organizer.zenPropertyString.return_value + t.assertEqual(zId, zprop) + t.assertEqual( + propertyString, t.organizer.zenPropertyString.return_value ) - self.organizer.zenPropertyString.assert_called_with(zprop) + t.organizer.zenPropertyString.assert_called_with(zprop) - self.organizer.zenPropIsPassword.return_value = True + t.organizer.zenPropIsPassword.return_value = True zId, propertyString = next(results) - self.assertEqual(zId, zprop) - self.assertEqual( - propertyString, self.organizer.getProperty.return_value + t.assertEqual(zId, zprop) + t.assertEqual( + propertyString, t.organizer.getProperty.return_value ) - self.organizer.getProperty.assert_called_with(zprop, "") + t.organizer.getProperty.assert_called_with(zprop, "") - with self.assertRaises(StopIteration): + with t.assertRaises(StopIteration): next(results) - def test_generateChecksum(self): - getZorCProperties = create_autospec(self.bof.getZorCProperties) + @patch( + "{invalidationfilter}._getZorCProperties".format(**PATH), + autospec=True, + spec_set=True, + ) + def test_generateChecksum(t, _getZorCProps): zprop = Mock(name="zenPropertyId", spec_set=[]) - getZorCProperties.return_value = [(zprop, "property_string")] - self.bof.getZorCProperties = getZorCProperties - md5_checksum = md5() - - self.bof.generateChecksum(self.organizer, md5_checksum) + data = (zprop, "property_string") + _getZorCProps.return_value = [data] + actual = md5() expect = md5() - expect.update("%s|%s" % (getZorCProperties(self.organizer)[0])) - getZorCProperties.assert_called_with(self.organizer) - self.assertEqual(md5_checksum.hexdigest(), expect.hexdigest()) + expect.update("%s|%s" % data) + + t.bof.generateChecksum(t.organizer, actual) + + _getZorCProps.assert_called_with(t.organizer) + t.assertEqual(actual.hexdigest(), expect.hexdigest()) - def test_organizerChecksum(self): - getZorCProperties = create_autospec(self.bof.getZorCProperties) + @patch( + "{invalidationfilter}._getZorCProperties".format(**PATH), + autospec=True, + spec_set=True, + ) + def test_organizerChecksum(t, _getZorCProps): zprop = Mock(name="zenPropertyId", spec_set=[]) - getZorCProperties.return_value = [(zprop, "property_string")] - self.bof.getZorCProperties = getZorCProperties + data = (zprop, "property_string") + _getZorCProps.return_value = [data] - out = self.bof.organizerChecksum(self.organizer) + out = t.bof.organizerChecksum(t.organizer) expect = md5() - expect.update("%s|%s" % (getZorCProperties(self.organizer)[0])) - self.assertEqual(out, expect.hexdigest()) - - def test_include_ignores_non_matching_types(self): - self.bof._types = (str,) - ret = self.bof.include(False) - self.assertEqual(ret, FILTER_CONTINUE) - - def test_include_if_checksum_changed(self): - organizerChecksum = create_autospec(self.bof.organizerChecksum) - self.bof.organizerChecksum = organizerChecksum - self.bof._types = (Mock,) + 
expect.update("%s|%s" % data) + t.assertEqual(out, expect.hexdigest()) + + def test_include_ignores_non_matching_types(t): + t.bof._types = (str,) + ret = t.bof.include(False) + t.assertEqual(ret, FILTER_CONTINUE) + + def test_include_if_checksum_changed(t): + organizerChecksum = create_autospec(t.bof.organizerChecksum) + t.bof.organizerChecksum = organizerChecksum + t.bof._types = (Mock,) obj = Mock(name="object", spec_set=["getPrimaryPath"]) obj.getPrimaryPath.return_value = ["dmd", "brain"] organizer_path = "/".join(obj.getPrimaryPath()) - self.bof.checksum_map = {organizer_path: "existing_checksum"} + t.bof.checksum_map = {organizer_path: "existing_checksum"} organizerChecksum.return_value = "current_checksum" - ret = self.bof.include(obj) + ret = t.bof.include(obj) - self.assertEqual(ret, FILTER_CONTINUE) + t.assertEqual(ret, FILTER_CONTINUE) - def test_include_if_checksum_unchanged(self): - organizerChecksum = create_autospec(self.bof.organizerChecksum) - self.bof.organizerChecksum = organizerChecksum + def test_include_if_checksum_unchanged(t): + organizerChecksum = create_autospec(t.bof.organizerChecksum) + t.bof.organizerChecksum = organizerChecksum existing_checksum = "checksum" current_checksum = "checksum" organizerChecksum.return_value = current_checksum - self.bof._types = (Mock,) + t.bof._types = (Mock,) obj = Mock(name="object", spec_set=["getPrimaryPath"]) obj.getPrimaryPath.return_value = ["dmd", "brain"] organizer_path = "/".join(obj.getPrimaryPath()) - self.bof.checksum_map = {organizer_path: existing_checksum} + t.bof.checksum_map = {organizer_path: existing_checksum} - ret = self.bof.include(obj) + ret = t.bof.include(obj) - self.assertEqual(ret, FILTER_EXCLUDE) + t.assertEqual(ret, FILTER_EXCLUDE) class DeviceClassInvalidationFilterTest(TestCase): - def setUp(self): - self.dcif = DeviceClassInvalidationFilter() + def setUp(t): + t.dcif = DeviceClassInvalidationFilter() - def test_init(self): - IInvalidationFilter.providedBy(self.dcif) - verifyObject(IInvalidationFilter, self.dcif) - self.assertEqual(self.dcif._types, (DeviceClass,)) + def test_init(t): + IInvalidationFilter.providedBy(t.dcif) + verifyObject(IInvalidationFilter, t.dcif) + t.assertEqual(t.dcif._types, (DeviceClass,)) - def test_getRoot(self): + def test_getRoot(t): context = Mock(name="context") - root = self.dcif.getRoot(context) - self.assertEqual(root, context.dmd.Devices.primaryAq()) + root = t.dcif.getRoot(context) + t.assertEqual(root, context.dmd.Devices.primaryAq()) @patch( "{invalidationfilter}.BaseOrganizerFilter.generateChecksum".format( @@ -218,7 +227,7 @@ def test_getRoot(self): autospec=True, spec_set=True, ) - def test_generateChecksum(self, super_generateChecksum): + def test_generateChecksum(t, super_generateChecksum): md5_checksum = md5() organizer = Mock( name="Products.ZenRelations.ZenPropertyManager", @@ -228,44 +237,44 @@ def test_generateChecksum(self, super_generateChecksum): rrdTemplate.exportXml.return_value = "some exemel" organizer.rrdTemplates.return_value = [rrdTemplate] - self.dcif.generateChecksum(organizer, md5_checksum) + t.dcif.generateChecksum(organizer, md5_checksum) # We cannot validate the output of the current version, refactor needed rrdTemplate.exportXml.was_called_once() super_generateChecksum.assert_called_with( - self.dcif, organizer, md5_checksum + t.dcif, organizer, md5_checksum ) class OSProcessOrganizerFilterTest(TestCase): - def test_init(self): + def test_init(t): ospof = OSProcessOrganizerFilter() IInvalidationFilter.providedBy(ospof) 
verifyObject(IInvalidationFilter, ospof) - self.assertEqual(ospof._types, (OSProcessOrganizer,)) + t.assertEqual(ospof._types, (OSProcessOrganizer,)) - def test_getRoot(self): + def test_getRoot(t): ospof = OSProcessOrganizerFilter() context = Mock(name="context") root = ospof.getRoot(context) - self.assertEqual(root, context.dmd.Processes.primaryAq()) + t.assertEqual(root, context.dmd.Processes.primaryAq()) class OSProcessClassFilterTest(TestCase): - def setUp(self): - self.ospcf = OSProcessClassFilter() + def setUp(t): + t.ospcf = OSProcessClassFilter() - def test_init(self): - IInvalidationFilter.providedBy(self.ospcf) - verifyObject(IInvalidationFilter, self.ospcf) + def test_init(t): + IInvalidationFilter.providedBy(t.ospcf) + verifyObject(IInvalidationFilter, t.ospcf) - self.assertEqual(self.ospcf._types, (OSProcessClass,)) + t.assertEqual(t.ospcf._types, (OSProcessClass,)) - def test_getRoot(self): + def test_getRoot(t): context = Mock(name="context") - root = self.ospcf.getRoot(context) - self.assertEqual(root, context.dmd.Processes.primaryAq()) + root = t.ospcf.getRoot(context) + t.assertEqual(root, context.dmd.Processes.primaryAq()) @patch( "{invalidationfilter}.BaseOrganizerFilter.generateChecksum".format( @@ -274,7 +283,7 @@ def test_getRoot(self): autospec=True, spec_set=True, ) - def test_generateChecksum(self, super_generateChecksum): + def test_generateChecksum(t, super_generateChecksum): organizer = Mock( name="Products.ZenRelations.ZenPropertyManager", spec_set=["property_id", "_properties"], @@ -284,11 +293,11 @@ def test_generateChecksum(self, super_generateChecksum): organizer.property_id = "value" md5_checksum = md5() - self.ospcf.generateChecksum(organizer, md5_checksum) + t.ospcf.generateChecksum(organizer, md5_checksum) expect = md5() expect.update("%s|%s" % (prop["id"], getattr(organizer, prop["id"]))) - self.assertEqual(md5_checksum.hexdigest(), expect.hexdigest()) + t.assertEqual(md5_checksum.hexdigest(), expect.hexdigest()) super_generateChecksum.assert_called_with( - self.ospcf, organizer, md5_checksum + t.ospcf, organizer, md5_checksum ) diff --git a/Products/ZenHub/tests/test_invalidationmanager.py b/Products/ZenHub/tests/test_invalidationmanager.py index fb2cd7460a..fcb28b0117 100644 --- a/Products/ZenHub/tests/test_invalidationmanager.py +++ b/Products/ZenHub/tests/test_invalidationmanager.py @@ -12,10 +12,9 @@ from unittest import TestCase from mock import patch, Mock, create_autospec, MagicMock, sentinel, ANY -from mock_interface import create_interface_mock - from Products.ZenHub.zenhub import ZenHub -from Products.ZenHub.invalidationmanager import ( + +from ..invalidationmanager import ( coroutine, DeviceComponent, FILTER_EXCLUDE, @@ -31,6 +30,7 @@ set_sink, transform_obj, ) +from .mock_interface import create_interface_mock PATH = {"src": "Products.ZenHub.invalidationmanager"} @@ -49,7 +49,6 @@ def setUp(t): t.dmd = Mock( name="dmd", spec_set=["getPhysicalRoot", "pauseHubNotifications"] ) - t.log = Mock(name="log", spec_set=["debug", "warn", "info"]) t.syncdb = Mock(name="ZenHub.async_syncdb", spec_set=[]) t.poll_invalidations = Mock( name="ZenHub.storage.poll_invalidations", spec_set=[] @@ -57,12 +56,14 @@ def setUp(t): t.send_event = Mock(ZenHub.sendEvent, name="ZenHub.sendEvent") t.im = InvalidationManager( - t.dmd, t.log, t.syncdb, t.poll_invalidations, t.send_event + t.dmd, t.syncdb, t.poll_invalidations, t.send_event ) + def tearDown(t): + logging.disable(logging.NOTSET) + def test___init__(t): t.assertEqual(t.im._InvalidationManager__dmd, 
t.dmd) - t.assertEqual(t.im.log, t.log) t.assertEqual(t.im._InvalidationManager__syncdb, t.syncdb) t.assertEqual( t.im._InvalidationManager__poll_invalidations, t.poll_invalidations @@ -79,21 +80,21 @@ def test___init__(t): def test_initialize_invalidation_filters(t, getUtilitiesFor): MockIInvalidationFilter = create_interface_mock(IInvalidationFilter) filters = [MockIInvalidationFilter() for i in range(3)] - # weighted in reverse order - for i, filter in enumerate(filters): - filter.weight = 10 - i + # Weighted in reverse order + for i, fltr in enumerate(filters): + fltr.weight = 10 - i getUtilitiesFor.return_value = [ ("f%s" % i, f) for i, f in enumerate(filters) ] - t.im.initialize_invalidation_filters() + initialized_filters = t.im.initialize_invalidation_filters(t.dmd) - for filter in filters: - filter.initialize.assert_called_with(t.dmd) + for fltr in filters: + fltr.initialize.assert_called_with(t.dmd) # check sorted by weight filters.reverse() - t.assertEqual(t.im._invalidation_filters, filters) + t.assertListEqual(initialized_filters, filters) @patch("{src}.time".format(**PATH), autospec=True) def test_process_invalidations(t, time): @@ -198,16 +199,16 @@ def setUp(t): def test_invalidation_pipeline(t): t.invalidation_pipeline.run(t.oid) - t.assertEqual(t.sink, set([t.oid])) + t.assertEqual(t.sink, {t.oid}) def test__build_pipeline(t): __pipeline = t.invalidation_pipeline._build_pipeline() __pipeline.send(t.oid) - t.assertEqual(t.sink, set([t.oid])) + t.assertEqual(t.sink, {t.oid}) @patch("{src}.log".format(**PATH), autospec=True) - def test_run_handles_exceptions(t, log): + def test_run_handles_exceptions(t, log_): """An exception in any of the coroutines will first raise the exception then cause StopIteration exceptions on subsequent runs. 
we handle the first exception and rebuild the pipeline @@ -219,8 +220,8 @@ def test_run_handles_exceptions(t, log): t.invalidation_pipeline.run(x) # causes an exception t.invalidation_pipeline.run(t.oid) - log.exception.assert_called_with(ANY) - t.assertEqual(t.sink, set([t.oid])) + log_.exception.assert_called_with(ANY) + t.assertEqual(t.sink, {t.oid}) # ensure the dereferenced pipeline is cleaned up safely import gc @@ -380,7 +381,7 @@ class set_sink_Test(TestCase): def test_set_sink_accepts_a_set(t): output = set() set_sink_pipe = set_sink(output) - set_sink_pipe.send({"a", "a", "b", "c"} or ("a",)) + set_sink_pipe.send({"a", "b", "c"} or ("a",)) t.assertEqual(output, {"a", "b", "c"}) def test_set_sink_accepts_a_tuple(t): diff --git a/Products/ZenHub/tests/test_invalidationoid.py b/Products/ZenHub/tests/test_invalidationoid.py index 36c1a8d57c..d3b7839d0a 100644 --- a/Products/ZenHub/tests/test_invalidationoid.py +++ b/Products/ZenHub/tests/test_invalidationoid.py @@ -1,18 +1,14 @@ -from unittest import TestCase from mock import Mock +from unittest import TestCase +from zope.component import adaptedBy +from zope.interface.verify import verifyObject -# Breaks unittest independence due to -# ImportError: No module named CMFCore.DirectoryView -from Products.ZenHub.invalidationoid import ( +from ..invalidationoid import ( DefaultOidTransform, - PrimaryPathObjectManager, IInvalidationOid, - DeviceOidTransform, + PrimaryPathObjectManager, ) -from zope.interface.verify import verifyObject -from zope.component import adaptedBy - class DefaultOidTransformTest(TestCase): def setUp(self): @@ -36,35 +32,3 @@ def test_init(self): def test_transformOid(self): ret = self.default_oid_transform.transformOid("unmodified oid") self.assertEqual(ret, "unmodified oid") - - -class DeviceOidTransformTest(TestCase): - def setUp(self): - self.obj = Mock(spec_set=PrimaryPathObjectManager) - self.device_oid_transform = DeviceOidTransform(self.obj) - - def test_implements_IInvalidationOid(self): - # Provides the interface - IInvalidationOid.providedBy(self.device_oid_transform) - # Implements the interface it according to spec - verifyObject(IInvalidationOid, self.device_oid_transform) - - def test_init(self): - self.assertEqual(self.device_oid_transform._obj, self.obj) - - def test_transformOid(self): - """returns unmodified oid, if _obj has no device attribute""" - self.assertFalse(hasattr(self.obj, "device")) - ret = self.device_oid_transform.transformOid("unmodified oid") - self.assertEqual(ret, "unmodified oid") - - def test_transformOid_returns_device_oid(self): - """returns obj.device()._p_oid if obj.device exists""" - obj = Mock(name="PrimaryPathObjectManager", spec_set=["device"]) - device = Mock(name="device", spec_set=["_p_oid"]) - obj.device.return_value = device - - device_oid_transform = DeviceOidTransform(obj) - ret = device_oid_transform.transformOid("ignored oid") - - self.assertEqual(ret, obj.device.return_value._p_oid) diff --git a/Products/ZenHub/tests/test_invalidations.py b/Products/ZenHub/tests/test_invalidations.py index 8be0ead9c4..35768eb6df 100644 --- a/Products/ZenHub/tests/test_invalidations.py +++ b/Products/ZenHub/tests/test_invalidations.py @@ -1,24 +1,22 @@ +import logging + from unittest import TestCase -from mock import Mock, patch, call, MagicMock, create_autospec +from mock import Mock, patch, call, MagicMock +from zope.component import adaptedBy -from Products.ZenHub.invalidations import ( - betterObjectEventNotify, +from ..invalidations import ( + _get_event, + 
_notify_event_subscribers, defer, - handle_oid, PrimaryPathObjectManager, - DeviceComponent, DeletionEvent, UpdateEvent, InvalidationProcessor, IInvalidationProcessor, - IITreeSet, IHubCreatedEvent, - INVALIDATIONS_PAUSED, ) +from .mock_interface import create_interface_mock -from mock_interface import create_interface_mock - -from zope.component import adaptedBy """ These tests are currently excellent examples of tests with excessive patching @@ -27,13 +25,15 @@ Complicated Mocks indicate it reaches too deeply into external objects """ +PATH = {"src": "Products.ZenHub.invalidations"} + -class invalidationsTest(TestCase): - @patch("Products.ZenHub.invalidations.getGlobalSiteManager", autospec=True) - @patch("Products.ZenHub.invalidations.providedBy", autospec=True) - @patch("Products.ZenHub.invalidations.giveTimeToReactor", autospec=True) - def test_betterObjectEventNotify( - self, giveTimeToReactor, providedBy, getGlobalSiteManager +class NotifyEventSubscribersTest(TestCase): + @patch("{src}.getGlobalSiteManager".format(**PATH), autospec=True) + @patch("{src}.providedBy".format(**PATH), autospec=True) + @patch("{src}.giveTimeToReactor".format(**PATH), autospec=True) + def test_notify_event_subscribers( + t, giveTimeToReactor, providedBy, getGlobalSiteManager ): gsm = Mock(name="global_site_manager", spec_set=["adapters"]) getGlobalSiteManager.return_value = gsm @@ -45,7 +45,7 @@ def test_betterObjectEventNotify( gsm.adapters.subscriptions.return_value = subscriptions event = Mock(name="event", spec_set=["object"]) - ret = betterObjectEventNotify(event) + ret = _notify_event_subscribers(event) # Gets a list of subscriptions that adapt this event's interface gsm.adapters.subscriptions.assert_called_with( @@ -60,180 +60,131 @@ def test_betterObjectEventNotify( ) # InlineCallbacks return a Deferred - self.assertIsInstance(ret, defer.Deferred) + t.assertIsInstance(ret, defer.Deferred) # Has no return value - self.assertEqual(ret.result, None) + t.assertEqual(ret.result, None) + - def setUp(self): - self.dmd = Mock(name="dmd", spec_set=["_p_jar"]) +class GetEventTest(TestCase): + def setUp(t): + t.dmd = Mock(name="dmd", spec_set=["_p_jar"]) # object must be of type PrimaryPathObjectManager or DeviceComponent - self.obj = Mock(name="invalid type", spec_set=[]) - self.oid = "oid" - self._p_jar = {self.oid: self.obj} - self.dmd._p_jar = self._p_jar - - def test_handle_oid(self): - """object must be of type PrimaryPathObjectManager or DeviceComponent - or it will be dropped, and handle_oid returns None - """ - self.assertFalse(isinstance(self.obj, PrimaryPathObjectManager)) - self.assertFalse(isinstance(self.obj, DeviceComponent)) - - ret = handle_oid(self.dmd, self.oid) - self.assertEqual(ret, None) - - @patch( - "Products.ZenHub.invalidations.betterObjectEventNotify", autospec=True - ) - def test_handle_oid_deletion(self, betterObjectEventNotify): - # Replace test object with a valid type + t.obj = Mock(name="invalid type", spec_set=[]) + t.oid = "oid" + t._p_jar = {t.oid: t.obj} + t.dmd._p_jar = t._p_jar + + def test_get_deletion_event(t): obj = MagicMock( PrimaryPathObjectManager, name="primary_path_object_manager", ) - self.dmd._p_jar = {self.oid: obj} - self.assertEqual(obj, self.dmd._p_jar[self.oid]) - self.assertTrue(isinstance(obj, PrimaryPathObjectManager)) - + t.dmd._p_jar = {t.oid: obj} # obj.__of__(dmd).primaryAq() ensures we get the primary path primary_aq = obj.__of__.return_value.primaryAq # raising a KeyError indicates a deleted object primary_aq.side_effect = KeyError() - # 
Returns the result of betterObjectEventNotify(event) - # where event is a new UpdateEvent or DeleteEvent instance - # mock betterObjectEventNotify to pass back its one input - betterObjectEventNotify.side_effect = lambda event: event - - # execute - ret = handle_oid(self.dmd, self.oid) + t.assertEqual(obj, t.dmd._p_jar[t.oid]) + t.assertTrue(isinstance(obj, PrimaryPathObjectManager)) - # validate side effects - obj.__of__.assert_called_with(self.dmd) - primary_aq.assert_called_once_with() + ret = _get_event(t.dmd, obj, t.oid) + t.assertIsInstance(ret, DeletionEvent) - # validate return value - # should be a deferred wrapping a deletion event, yielded from BOEN - # but we had to short-circut betterObjectEventNotify - self.assertIsInstance(ret, DeletionEvent) - - @patch( - "Products.ZenHub.invalidations.betterObjectEventNotify", autospec=True - ) - def test_handle_oid_update(self, betterObjectEventNotify): - # Replace test object with a valid type + def test_get_updated_event(t): obj = MagicMock( PrimaryPathObjectManager, name="primary_path_object_manager", ) - self.dmd._p_jar = {self.oid: obj} - - # obj.__of__(dmd).primaryAq() ensures we get the primary path - primary_aq = obj.__of__.return_value.primaryAq - - # Returns the result of betterObjectEventNotify(event) - # where event is a new UpdateEvent or DeleteEvent instance - # mock betterObjectEventNotify to pass back its one input - betterObjectEventNotify.side_effect = lambda event: event - - # execute - ret = handle_oid(self.dmd, self.oid) + t.dmd._p_jar = {t.oid: obj} + obj.__of__.return_value.primaryAq.return_value = obj - # validate side effects - obj.__of__.assert_called_with(self.dmd) - primary_aq.assert_called_once_with() + t.assertEqual(obj, t.dmd._p_jar[t.oid]) + t.assertTrue(isinstance(obj, PrimaryPathObjectManager)) - # validate return value - # should be a deferred wrapping a deletion event, - # yielded from betterObjectEventNotify - # but we had to short-circut betterObjectEventNotify - self.assertIsInstance(ret, UpdateEvent) + actual = _get_event(t.dmd, obj, t.oid) + t.assertIsInstance(actual, UpdateEvent) class InvalidationProcessorTest(TestCase): - def setUp(self): - self.patch_getGlobalSiteManager = patch( - "Products.ZenHub.invalidations.getGlobalSiteManager", autospec=True + def setUp(t): + logging.disable(logging.CRITICAL) + t.patch_getGlobalSiteManager = patch( + "{src}.getGlobalSiteManager".format(**PATH), autospec=True ) - self.getGlobalSiteManager = self.patch_getGlobalSiteManager.start() + t.getGlobalSiteManager = t.patch_getGlobalSiteManager.start() - self.ip = InvalidationProcessor() - self.ip._hub = Mock(name="zenhub", spec_set=["dmd"]) - self.ip._hub_ready = Mock(name="_hub_ready_deferred") - self.ip._invalidation_queue = Mock(spec_set=IITreeSet) + t.ip = InvalidationProcessor() + t.ip._hub = Mock(name="zenhub", spec_set=["dmd"]) + t.ip._hub.dmd._p_jar = {} + t.ip._hub_ready = Mock(name="_hub_ready_deferred") - def tearDown(self): - self.patch_getGlobalSiteManager.stop() + def tearDown(t): + logging.disable(logging.NOTSET) + t.patch_getGlobalSiteManager.stop() - def test_init(self): + def test_init(t): IInvalidationProcessor.implementedBy(InvalidationProcessor) ip = InvalidationProcessor() IInvalidationProcessor.providedBy(ip) - # current version cannot be verified, setHub attribute not provided - # verifyObject(IInvalidationProcessor, processor) - self.assertIsInstance(ip._invalidation_queue, IITreeSet) - self.assertIsInstance(ip._hub_ready, defer.Deferred) + t.assertIsInstance(ip._hub_ready, defer.Deferred) 
# Registers its onHubCreated trigger, to wait for a HubCreated event - gsm = self.getGlobalSiteManager.return_value + gsm = t.getGlobalSiteManager.return_value gsm.registerHandler.assert_called_with(ip.onHubCreated) - def test_onHubCreated(self): + def test_onHubCreated(t): """this method gets triggered by a IHubCreatedEvent event""" # Is an adapter for IHubCreatedEvent type events - self.assertEqual( + t.assertEqual( list(adaptedBy(InvalidationProcessor.onHubCreated)), [IHubCreatedEvent], ) IHubCreatedEventMock = create_interface_mock(IHubCreatedEvent) event = IHubCreatedEventMock() - self.ip._hub_ready = Mock(spec_set=defer.Deferred) + t.ip._hub_ready = Mock(spec_set=defer.Deferred) - self.ip.onHubCreated(event) + t.ip.onHubCreated(event) # _hub is set to the hub specified in the IHubCreatedEvent - self.assertEqual(self.ip._hub, event.hub) + t.assertEqual(t.ip._hub, event.hub) # the _hub_ready deffered gets called back / triggered - self.ip._hub_ready.callback.assert_called_with(self.ip._hub) - - @patch("Products.ZenHub.invalidations.u64", autospec=True) - def test_processQueue(self, u64): - self.ip._hub.dmd.pauseHubNotifications = False - self.ip._dispatch = create_autospec(self.ip._dispatch) + t.ip._hub_ready.callback.assert_called_with(t.ip._hub) + @patch("{src}._get_event".format(**PATH), autospec=True) + @patch("{src}._notify_event_subscribers".format(**PATH), autospec=True) + def test_no_such_oids(t, notify_, get_event_): oids = ["oid1", "oid2", "oid3"] - ret = self.ip.processQueue(oids) - - u64.assert_has_calls([call(oid) for oid in oids]) - self.ip._invalidation_queue.insert.assert_has_calls( - [call(u64.return_value) for _ in oids] - ) - # WARNING: intended to return i>0 if successful, will currently - # reutrn 0 if a single oid passed in, even if successful - self.assertEqual(ret.result, len(oids) - 1) + d = t.ip.processQueue(oids) + handled, ignored = d.result - def test_processQueue_paused(self): - self.ip._hub.dmd.pauseHubNotifications = True + t.assertTupleEqual((handled, ignored), (0, 0)) - ret = self.ip.processQueue("oids") - - self.assertEqual(ret.result, INVALIDATIONS_PAUSED) - - @patch("Products.ZenHub.invalidations.handle_oid", autospec=True) - def test_dispatch(self, handle_oid): - handle_oid.fail = "derp" - - dmd = self.ip._hub.dmd - oid = "oid" - ioid = "ioid" - queue = self.ip._invalidation_queue + @patch("{src}._get_event".format(**PATH), autospec=True) + @patch("{src}._notify_event_subscribers".format(**PATH), autospec=True) + def test_ignored_oids(t, notify_, get_event_): + oids = ["oid1", "oid2", "oid3"] + objs = [Mock(), Mock(), Mock()] + t.ip._hub.dmd._p_jar.update(dict(zip(oids, objs))) + d = t.ip.processQueue(oids) + handled, ignored = d.result - ret = self.ip._dispatch(dmd, oid, ioid, queue) + t.assertTupleEqual((handled, ignored), (0, 3)) - handle_oid.assert_called_with(dmd, oid) - queue.remove.assert_called_with(ioid) + @patch("{src}._get_event".format(**PATH), autospec=True) + @patch("{src}._notify_event_subscribers".format(**PATH), autospec=True) + def test_mix_of_oids(t, notify_, get_event_): + oids = ["oid1", "oid2", "oid3"] + objs = [ + MagicMock(PrimaryPathObjectManager), + Mock(), + MagicMock(PrimaryPathObjectManager), + ] + t.ip._hub.dmd._p_jar.update(dict(zip(oids, objs))) + d = t.ip.processQueue(oids) + handled, ignored = d.result - self.assertEqual(ret, handle_oid.return_value) + t.assertTupleEqual((handled, ignored), (2, 1)) diff --git a/Products/ZenHub/tests/test_pinger.py b/Products/ZenHub/tests/test_pinger.py new file mode 100644 index 
0000000000..3285f8fbe8 --- /dev/null +++ b/Products/ZenHub/tests/test_pinger.py @@ -0,0 +1,72 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from mock import Mock, patch, ANY +from unittest import TestCase + +from Products.ZenHub.pinger import PingZenHub + +PATH = {"src": "Products.ZenHub.pinger"} + + +class PingZenHubTest(TestCase): + """Test the PingZenHub class.""" + + def setUp(t): + t.zenhub = Mock() + t.client = Mock() + t.interval = 30 + # Patch external dependencies + needs_patching = ["task", "logging"] + t.patchers = {} + for target in needs_patching: + patched = patch( + "{src}.{target}".format(target=target, **PATH), + autospec=True, + ) + t.patchers[target] = patched + name = target.rpartition(".")[-1] + setattr(t, name, patched.start()) + t.addCleanup(patched.stop) + + t.pzh = PingZenHub(t.zenhub, interval=t.interval) + + def test_start(t): + t.pzh.start() + + loop = t.task.LoopingCall.return_value + loop.start.assert_called_once_with(t.interval, now=False) + + def test_stop_before_start(t): + t.pzh.stop() + + loop = t.task.LoopingCall.return_value + t.assertFalse(loop.called) + + def test_stop_after_start(t): + t.pzh.start() + t.pzh.stop() + + loop = t.task.LoopingCall.return_value + loop.stop.assert_called_once_with() + + def test_call(t): + t.pzh() + t.zenhub.ping.assert_called_once_with() + + def test___call__failed(t): + logger = t.logging.getLogger.return_value + ex = ValueError("boom") + t.zenhub.ping.side_effect = ex + + t.pzh() + + logger.error.assert_called_once_with(ANY, ex) diff --git a/Products/ZenHub/tests/test_zenhub.py b/Products/ZenHub/tests/test_zenhub.py index 46cee949f1..65dece917b 100644 --- a/Products/ZenHub/tests/test_zenhub.py +++ b/Products/ZenHub/tests/test_zenhub.py @@ -145,7 +145,7 @@ def test___init__( # Event Handler shortcut t.assertEqual(zh.zem, zh.dmd.ZenEventManager) - # Messageing config, including work and invalidations + # Messaging config, including work and invalidations # Patched internal import of Products.ZenMessaging.queuemessaging load_config_override.assert_called_with( "twistedpublisher.zcml", @@ -396,14 +396,6 @@ def test_getRRDStats(t): ) t.assertEqual(ret, t.zh._metric_manager.get_rrd_stats.return_value) - def test_processQueue(t): - t.zh.processQueue() - t.zh._invalidation_manager.process_invalidations.assert_called_with() - - def test__initialize_invalidation_filters(t): - t.zh._initialize_invalidation_filters() - t.zh._invalidation_manager.initialize_invalidation_filters.assert_called_with() # noqa E501 - @patch("{src}.Event".format(**PATH), autospec=True) def test_sendEvent(t, Event): event = {"device": "x", "component": "y", "summary": "msg"} diff --git a/Products/ZenHub/tests/test_zenhubclient.py b/Products/ZenHub/tests/test_zenhubclient.py new file mode 100644 index 0000000000..ddd19d9a12 --- /dev/null +++ b/Products/ZenHub/tests/test_zenhubclient.py @@ -0,0 +1,276 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import + +import logging + +from unittest import TestCase + +from mock import call, MagicMock, Mock, patch, sentinel +from twisted.internet import defer, reactor +from twisted.python.failure import Failure + +from Products.ZenHub.zenhubclient import HubDown, ZenHubClient + +PATH = {"src": "Products.ZenHub.zenhubclient"} + + +class DisableLoggerLayer(object): + @classmethod + def setUp(self): + logging.disable(logging.CRITICAL) + + +class BrokerSimulator(object): + """Simulates a twisted.spread 'broker' object.""" + + def __init__(self): + self.transport = Mock(spec=["socket"]) + self.factory = Mock(spec=["login"]) + self.callback = None + + def notifyOnDisconnect(self, callback): + self.callback = callback + + +class ClientServiceSimulator(object): + """Simulates a twisted.application.internet.ClientService object.""" + + def __init__(self, endpoint, factory, retryPolicy, prepareConnection): + self.endpoint = endpoint + self.factory = factory + self.policy = retryPolicy + self.prepare_connection = prepareConnection + self.broker = BrokerSimulator() + self.deferred = defer.Deferred() + + def startService(self): + self.prepare_connection(self.broker) + + def stopService(self): + pass + + def whenConnected(self): + return self.deferred + + +class ZenHubClientTest(TestCase): + """Test the ZenHubClient class.""" + + layer = DisableLoggerLayer + + def setUp(t): + t.reactor = Mock(reactor, autospec=True) + t.endpoint = sentinel.endpoint + t.credentials = Mock() + t.app = Mock() + t.timeout = 10 + t.worklistId = "default" + t.zenhubref = Mock() + t.broker = BrokerSimulator() + t.startd = defer.Deferred() + t.stopd = defer.Deferred() + t.instanceId = "zenhub" + + # Patch external dependencies + needs_patching = [ + "ClientService", + "setKeepAlive", + "ZenPBClientFactory", + ] + t.patchers = {} + for target in needs_patching: + patched = patch( + "{src}.{target}".format(target=target, **PATH), + autospec=True, + ) + t.patchers[target] = patched + name = target.rpartition(".")[-1] + setattr(t, name, patched.start()) + t.addCleanup(patched.stop) + + t.client = t.ClientService.return_value + t.client.whenConnected.return_value = t.startd + + t.zhc = ZenHubClient( + t.app, + t.endpoint, + t.credentials, + t.timeout, + reactor=t.reactor, + ) + + def _stopService(): + t.zhc._disconnected() + return t.stopd + + init_args = list(t.ClientService.call_args)[1] + callback = init_args["prepareConnection"] + t.client.startService.side_effect = lambda: callback(t.broker) + t.client.stopService.side_effect = _stopService + t.broker.factory.login.return_value = defer.succeed(t.zenhubref) + t.zenhubref.callRemote.return_value = defer.succeed(t.instanceId) + + def test_initial_state(t): + t.assertFalse(t.zhc.is_connected) + t.assertIsNone(t.zhc.instance_id) + t.assertEqual(len(t.zhc.services), 0) + + def test_start(t): + d = t.zhc.start() + + t.assertIs(d, t.startd) + t.assertTrue(t.zhc.is_connected) + t.assertEqual(t.zhc.instance_id, t.instanceId) + t.assertEqual(len(t.zhc.services), 0) + + def test_stop_without_start(t): + d = t.zhc.stop() + + t.assertIs(d, t.stopd) + t.assertFalse(t.zhc.is_connected) + t.assertEqual(len(t.zhc.services), 0) + + def test_stop_after_start(t): + _ = t.zhc.start() + d = t.zhc.stop() + + t.assertIs(d, t.stopd) + t.assertFalse(t.zhc.is_connected) + t.assertEqual(len(t.zhc.services), 0) + + def test_when_connected(t): + cb1 = MagicMock() + cb2 = MagicMock() + 
t.zhc.notify_on_connect(cb1) + t.zhc.notify_on_connect(cb2) + + _ = t.zhc.start() + + t.assertTrue(cb1.called) + t.assertTrue(cb2.called) + + def test_when_disconnected(t): + cb1 = MagicMock() + cb2 = MagicMock() + t.zhc.notify_on_disconnect(cb1) + t.zhc.notify_on_disconnect(cb2) + + _ = t.zhc.start() + _ = t.zhc.stop() + + t.assertTrue(cb1.called) + t.assertTrue(cb2.called) + + def test_ping_without_start(t): + d = t.zhc.ping() + + t.assertIsInstance(d.result, Failure) + t.assertIsInstance(d.result.value, HubDown) + + # silence 'Unhandled error in Deferred' + d.addErrback(lambda x: None) + + def test_register_worker_without_start(t): + d = t.zhc.register_worker("a", "b", "c") # arg values don't matter + + t.assertIsInstance(d.result, Failure) + t.assertIsInstance(d.result.value, HubDown) + + # silence 'Unhandled error in Deferred' + d.addErrback(lambda x: None) + + def test_unregister_worker_without_start(t): + d = t.zhc.unregister_worker("a", "b") # arg values don't matter + + t.assertIsInstance(d.result, Failure) + t.assertIsInstance(d.result.value, HubDown) + + # silence 'Unhandled error in Deferred' + d.addErrback(lambda x: None) + + def test_get_service_without_start(t): + d = t.zhc.get_service("a", "b", "c", {}) # arg values don't matter + + t.assertIsInstance(d.result, Failure) + t.assertIsInstance(d.result.value, HubDown) + + # silence 'Unhandled error in Deferred' + d.addErrback(lambda x: None) + + def test_ping(t): + t.zhc.start() + t.zenhubref.callRemote.return_value = defer.succeed("pong") + + d = t.zhc.ping() + + last_call = t.zenhubref.callRemote.call_args_list[-1] + name, _ = last_call + t.assertEqual(name[0], "ping") + t.assertEqual("pong", d.result) + + def test_register_worker(t): + t.zhc.start() + t.zenhubref.callRemote.return_value = defer.succeed(None) + worker = Mock() + workerId = "default_0" + worklistId = "default" + + t.zhc.register_worker(worker, workerId, worklistId) + + last_call = t.zenhubref.callRemote.call_args_list[-1] + expected = call( + "reportForWork", worker, name=workerId, worklistId=worklistId + ) + t.assertEqual(last_call, expected) + + def test_unregister_worker(t): + t.zhc.start() + t.zenhubref.callRemote.return_value = defer.succeed(None) + workerId = "default_0" + worklistId = "default" + + t.zhc.unregister_worker(workerId, worklistId) + + last_call = t.zenhubref.callRemote.call_args_list[-1] + expected = call("resignFromWork", name=workerId, worklistId=worklistId) + t.assertEqual(last_call, expected) + + def test_get_new_service(t): + t.zhc.start() + service = Mock() + t.zenhubref.callRemote.return_value = defer.succeed(service) + name = "Products.ZenCollector.services.ConfigService.ConfigService" + monitor = "localhost" + listener = Mock() + options = {} + + d = t.zhc.get_service(name, monitor, listener, options) + + last_call = t.zenhubref.callRemote.call_args_list[-1] + expected = call("getService", name, monitor, listener, options) + t.assertEqual(last_call, expected) + t.assertEqual(d.result, service) + t.assertIn(name, t.zhc.services) + t.assertEqual(t.zhc.services[name], service) + + def test_get_cached_service(t): + t.zhc.start() + name = "PingPerformance" + service = Mock() + t.zhc._services[name] = service + monitor = "localhost" + listener = Mock() + options = {} + + d = t.zhc.get_service(name, monitor, listener, options) + + t.zenhubref.callRemote.assert_called_once_with("getHubInstanceId") + t.assertEqual(d.result, service) diff --git a/Products/ZenHub/tests/test_zenhubworker.py b/Products/ZenHub/tests/test_zenhubworker.py index 
4eedceb60a..6bda861843 100644 --- a/Products/ZenHub/tests/test_zenhubworker.py +++ b/Products/ZenHub/tests/test_zenhubworker.py @@ -11,24 +11,21 @@ import sys -from mock import patch, sentinel, call, Mock, create_autospec, ANY, MagicMock +from mock import patch, sentinel, call, Mock, create_autospec, ANY from unittest import TestCase from Products.ZenHub.zenhubworker import ( _CumulativeWorkerStats, ContinuousProfiler, - defer, IDLE, IMetricManager, pb, PB_PORT, - PingZenHub, RemoteBadMonitor, ServiceReference, ServiceReferenceFactory, UnknownServiceError, ZCmdBase, - ZenHubClient, ZENHUB_MODULE, ZenHubWorker, ) @@ -71,6 +68,7 @@ def setUp(t): t.options.hubpassword = sentinel.hubpassword t.options.workerid = sentinel.workerid t.options.monitor = sentinel.monitor + t.options.localport = 12345 # Patch external dependencies needs_patching = [ @@ -82,12 +80,15 @@ def setUp(t): "ContinuousProfiler", "MetricManager", "Metrology", + "PingZenHub", "ServiceLoader", "ServiceManager", "ServiceReferenceFactory", "ServiceRegistry", "UsernamePassword", "ZenHubClient", + "serverFromString", + "LocalServer", ] t.patchers = {} for target in needs_patching: @@ -151,15 +152,28 @@ def test___init__(t): "tcp:%s:%s" % (t.zhw.options.hubhost, t.zhw.options.hubport), ) t.ZenHubClient.assert_called_once_with( - t.reactor, + t.zhw, t.clientFromString.return_value, t.UsernamePassword.return_value, - t.zhw, t.zhw.options.hub_response_timeout, - t.zhw.worklistId, + t.reactor, ) t.assertEqual(t.ZenHubClient.return_value, t.zhw._ZenHubWorker__client) + t.PingZenHub.assert_called_once_with(t.ZenHubClient.return_value) + + t.serverFromString.assert_called_once_with( + t.reactor, + "tcp:{}:interface=127.0.0.1".format(t.zhw.options.localport), + ) + t.LocalServer.assert_called_once_with( + t.reactor, t.serverFromString.return_value + ) + server = t.LocalServer.return_value + server.add_resource.assert_has_calls( + [call("zenhub", ANY), call("stats", ANY)] + ) + t.MetricManager.assert_called_with( daemon_tags={ "zenoss_daemon": "zenhub_worker_%s_%s" @@ -180,6 +194,14 @@ def test___init__(t): name="zenhub_worker_metricmanager", ) + def test_getZenHubStatus_disconnected(t): + t.zhw._ZenHubWorker__client.is_connected = False + t.assertEqual(t.zhw.getZenHubStatus(), "disconnected") + + def test_getZenHubStatus_connected(t): + t.zhw._ZenHubWorker__client.is_connected = True + t.assertEqual(t.zhw.getZenHubStatus(), "connected") + @patch("{src}.signal".format(**PATH), autospec=True) def test_start(t, signal): signal.SIGUSR1 = sentinel.SIGUSR1 @@ -195,11 +217,14 @@ def test_start(t, signal): ) t.ZenHubClient.return_value.start.assert_called_once_with() + t.LocalServer.return_value.start.assert_called_once_with() t.MetricManager.return_value.start.assert_called_once_with() t.reactor.addSystemEventTrigger.assert_has_calls( [ call("before", "shutdown", t.ZenHubClient.return_value.stop), + call("before", "shutdown", t.PingZenHub.return_value.stop), + call("before", "shutdown", t.LocalServer.return_value.stop), call("before", "shutdown", t.MetricManager.return_value.stop), ] ) @@ -409,368 +434,6 @@ def test_buildOptions(t, ZCmdBase): t.assertEqual(t.zhw.options.workerid, 0) -class ZenHubClientTest(TestCase): - """Test the ZenHubClient class.""" - - def setUp(t): - # t.reactor = Mock() - t.endpoint = sentinel.endpoint - t.credentials = Mock() - t.worker = Mock() - t.timeout = 10 - t.worklistId = "default" - - # Patch external dependencies - needs_patching = [ - "ZenPBClientFactory", - "clientFromString", - "ClientService", - 
"ConnectedToZenHubSignalFile", - "PingZenHub", - "backoffPolicy", - "getLogger", - "load_config", - "reactor", - "task.LoopingCall", - ] - t.patchers = {} - for target in needs_patching: - patched = patch( - "{src}.{target}".format(target=target, **PATH), - autospec=True, - ) - t.patchers[target] = patched - name = target.rpartition(".")[-1] - setattr(t, name, patched.start()) - t.addCleanup(patched.stop) - - t.zhc = ZenHubClient( - t.reactor, - t.endpoint, - t.credentials, - t.worker, - t.timeout, - t.worklistId, - ) - - def test___init__(t): - t.assertEqual(t.zhc._ZenHubClient__reactor, t.reactor) - t.assertEqual(t.zhc._ZenHubClient__endpoint, t.endpoint) - t.assertEqual(t.zhc._ZenHubClient__credentials, t.credentials) - t.assertEqual(t.zhc._ZenHubClient__worker, t.worker) - t.assertEqual(t.zhc._ZenHubClient__timeout, t.timeout) - - t.assertFalse(t.zhc._ZenHubClient__stopping) - t.assertIsNone(t.zhc._ZenHubClient__pinger) - t.assertIsNone(t.zhc._ZenHubClient__service) - t.assertEqual(t.zhc._ZenHubClient__log, t.getLogger.return_value) - t.assertEqual( - t.zhc._ZenHubClient__signalFile, - t.ConnectedToZenHubSignalFile.return_value, - ) - - @patch.object(ZenHubClient, "_ZenHubClient__prepForConnection") - def test_start(t, prepForConnection): - t.zhc._ZenHubClient__stopping = True - - t.zhc.start() - - t.assertFalse(t.zhc._ZenHubClient__stopping) - t.backoffPolicy.assert_called_once_with(initialDelay=0.5, factor=3.0) - t.ClientService.assert_called_once_with( - t.endpoint, - t.ZenPBClientFactory.return_value, - retryPolicy=t.backoffPolicy.return_value, - ) - service = t.ClientService.return_value - service.startService.assert_called_once_with() - prepForConnection.assert_called_once_with() - - @patch.object(ZenHubClient, "_ZenHubClient__reset") - def test_stop(t, reset): - t.zhc.stop() - t.assertTrue(t.zhc._ZenHubClient__stopping) - reset.assert_called_once_with() - - @patch.object(ZenHubClient, "_ZenHubClient__reset") - @patch.object(ZenHubClient, "start") - def test_restart(t, start, reset): - t.zhc.restart() - reset.assert_called_once_with() - start.assert_called_once_with() - - def test___reset_not_started(t): - signalFile = t.ConnectedToZenHubSignalFile.return_value - service = t.ClientService.return_value - pinger = t.LoopingCall.return_value - - t.zhc._ZenHubClient__reset() - - signalFile.remove.assert_called_once_with() - service.stopService.assert_not_called() - pinger.stop.assert_not_called() - - def test___reset_after_start(t): - signalFile = t.ConnectedToZenHubSignalFile.return_value - service = t.ClientService.return_value - t.zhc._ZenHubClient__service = service - pinger = t.LoopingCall.return_value - t.zhc._ZenHubClient__pinger = pinger - - t.zhc._ZenHubClient__reset() - - signalFile.remove.assert_called_once_with() - service.stopService.assert_called_once_with() - pinger.stop.assert_called_once_with() - t.assertIsNone(t.zhc._ZenHubClient__pinger) - t.assertIsNone(t.zhc._ZenHubClient__service) - - @patch.object(ZenHubClient, "_ZenHubClient__connected") - @patch.object(ZenHubClient, "_ZenHubClient__notConnected") - def test___prepForConnection(t, notConnected, connected): - service = t.ClientService.return_value - t.zhc._ZenHubClient__service = service - d = t.zhc._ZenHubClient__service.whenConnected.return_value - - t.zhc._ZenHubClient__prepForConnection() - - service.whenConnected.assert_called_once_with() - d.addCallbacks.assert_called_once_with(connected, notConnected) - - def test___prepForConnection_after_stopping(t): - service = t.ClientService.return_value - 
t.zhc._ZenHubClient__service = service - t.zhc._ZenHubClient__stopping = True - - t.zhc._ZenHubClient__prepForConnection() - - service.whenConnected.assert_not_called() - - @patch.object(ZenHubClient, "_ZenHubClient__prepForConnection") - def test___disconnected_not_connected(t, prepForConnection): - signalFile = t.ConnectedToZenHubSignalFile.return_value - - t.zhc._ZenHubClient__disconnected() - - prepForConnection.assert_called_once_with() - signalFile.remove.assert_called_once_with() - - @patch.object(ZenHubClient, "_ZenHubClient__prepForConnection") - def test___disconnected_after_connection(t, prepForConnection): - signalFile = t.ConnectedToZenHubSignalFile.return_value - pinger = t.LoopingCall.return_value - t.zhc._ZenHubClient__pinger = pinger - - t.zhc._ZenHubClient__disconnected() - - prepForConnection.assert_called_once_with() - signalFile.remove.assert_called_once_with() - pinger.stop.assert_called_once_with() - t.assertIsNone(t.zhc._ZenHubClient__pinger) - - @patch.object(ZenHubClient, "restart") - @patch.object(ZenHubClient, "_ZenHubClient__login") - def test___connected_no_connection(t, login, restart): - broker = MagicMock(spec=["transport"]) - broker.transport.mock_add_spec([""]) - - t.zhc._ZenHubClient__connected(broker) - restart.assert_called_once_with() - login.assert_not_called() - - @patch.object(ZenHubClient, "_ZenHubClient__login") - @patch.object(ZenHubClient, "_ZenHubClient__pingFail") - @patch.object(ZenHubClient, "restart") - @patch("{src}.setKeepAlive".format(**PATH), autospec=True) - def test___connected(t, setKeepAlive, restart, pingFail, login): - broker = MagicMock(spec=["transport", "notifyOnDisconnect"]) - broker.transport.mock_add_spec(["socket"]) - zenhub = login.return_value - pinger = t.LoopingCall.return_value - pinger_deferred = pinger.start.return_value - - t.zhc._ZenHubClient__connected(broker) - - login.assert_called_once_with(broker) - zenhub.callRemote.assert_called_once_with( - "reportingForWork", - t.zhc._ZenHubClient__worker, - workerId=t.zhc._ZenHubClient__worker.instanceId, - worklistId=t.worklistId, - ) - t.LoopingCall.assert_called_once_with(t.PingZenHub.return_value) - t.assertEqual(t.zhc._ZenHubClient__pinger, pinger) - pinger.start.assert_called_once_with( - t.zhc._ZenHubClient__timeout, - now=False, - ) - pinger_deferred.addErrback.assert_called_once_with(pingFail) - t.zhc._ZenHubClient__signalFile.touch.assert_called_once_with() - broker.notifyOnDisconnect.assert_called_once_with( - t.zhc._ZenHubClient__disconnected, - ) - - t.zhc._ZenHubClient__signalFile.remove.assert_not_called() - restart.assert_not_called() - t.reactor.stop.assert_not_called() - - @patch.object(ZenHubClient, "_ZenHubClient__login") - def test___connected_login_failure(t, login): - broker = MagicMock(spec=["transport"]) - broker.transport.mock_add_spec(["socket"]) - ex = ValueError("boom") - login.side_effect = ex - - t.zhc._ZenHubClient__connected(broker) - - login.assert_called_once_with(broker) - t.zhc._ZenHubClient__log.error.assert_called_once_with( - ANY, - type(ex).__name__, - ex, - ) - t.zhc._ZenHubClient__signalFile.remove.assert_called_once_with() - t.reactor.stop.assert_called_once_with() - - @patch.object(ZenHubClient, "_ZenHubClient__login") - @patch.object(ZenHubClient, "restart") - def test___connected_login_timeout(t, restart, login): - broker = MagicMock(spec=["transport"]) - broker.transport.mock_add_spec(["socket"]) - ex = defer.CancelledError() - login.side_effect = ex - - t.zhc._ZenHubClient__connected(broker) - - 
login.assert_called_once_with(broker) - t.zhc._ZenHubClient__log.error.assert_called_once_with(ANY) - restart.assert_called_once_with() - - t.zhc._ZenHubClient__signalFile.remove.assert_not_called() - t.zhc._ZenHubClient__signalFile.touch.assert_not_called() - t.reactor.stop.assert_not_called() - - @patch.object(ZenHubClient, "_ZenHubClient__login") - @patch.object(ZenHubClient, "restart") - def test___connected_reportingForWork_failure(t, restart, login): - broker = MagicMock(spec=["transport"]) - broker.transport.mock_add_spec(["socket"]) - zenhub = login.return_value - ex = ValueError("boom") - zenhub.callRemote.side_effect = ex - - t.zhc._ZenHubClient__connected(broker) - - login.assert_called_once_with(broker) - zenhub.callRemote.assert_called_once_with( - "reportingForWork", - t.zhc._ZenHubClient__worker, - workerId=t.zhc._ZenHubClient__worker.instanceId, - worklistId=t.worklistId, - ) - t.zhc._ZenHubClient__log.error.assert_called_once_with( - ANY, - type(ex).__name__, - ex, - ) - t.zhc._ZenHubClient__signalFile.remove.assert_called_once_with() - t.reactor.stop.assert_called_once_with() - - def test___login(t): - broker = MagicMock(spec=["factory"]) - broker.factory.mock_add_spec(["login"]) - expected = defer.Deferred() - broker.factory.login.return_value = expected - timeout = t.reactor.callLater.return_value - timeout.active.return_value = True - - actual = t.zhc._ZenHubClient__login(broker) - actual.callback("OK") - - t.assertEqual(expected, actual) - broker.factory.login.assert_called_once_with( - t.zhc._ZenHubClient__credentials, - t.zhc._ZenHubClient__worker, - ) - t.reactor.callLater.assert_called_once_with( - t.zhc._ZenHubClient__timeout, - actual.cancel, - ) - timeout.active.assert_called_once_with() - timeout.cancel.assert_called_once_with() - - def test___login_timeout(t): - broker = MagicMock(spec=["factory"]) - broker.factory.mock_add_spec(["login"]) - expected = defer.Deferred() - broker.factory.login.return_value = expected - timeout = t.reactor.callLater.return_value - timeout.active.return_value = False - - actual = t.zhc._ZenHubClient__login(broker) - actual.addErrback(lambda x: None) # don't propate the error - actual.cancel() - - t.assertEqual(expected, actual) - broker.factory.login.assert_called_once_with( - t.zhc._ZenHubClient__credentials, - t.zhc._ZenHubClient__worker, - ) - t.reactor.callLater.assert_called_once_with( - t.zhc._ZenHubClient__timeout, - actual.cancel, - ) - timeout.active.assert_called_once_with() - timeout.cancel.assert_not_called() - - -class PingZenHubTest(TestCase): - """Test the PingZenHub class.""" - - def setUp(t): - t.zenhub = Mock() - t.client = Mock() - # Patch external dependencies - needs_patching = [ - "getLogger", - ] - t.patchers = {} - for target in needs_patching: - patched = patch( - "{src}.{target}".format(target=target, **PATH), - autospec=True, - ) - t.patchers[target] = patched - name = target.rpartition(".")[-1] - setattr(t, name, patched.start()) - t.addCleanup(patched.stop) - - t.pzh = PingZenHub(t.zenhub, t.client) - - def test___init__(t): - logger = t.getLogger.return_value - - t.assertEqual(t.zenhub, t.pzh._PingZenHub__zenhub) - t.assertEqual(t.client, t.pzh._PingZenHub__client) - t.assertEqual(logger, t.pzh._PingZenHub__log) - t.getLogger.assert_called_once_with(t.pzh) - - def test___call__(t): - t.pzh.__call__() - t.zenhub.callRemote.assert_called_once_with("ping") - t.client.restart.assert_not_called() - - def test___call__failed(t): - logger = t.getLogger.return_value - ex = ValueError("boom") - 
t.zenhub.callRemote.side_effect = ex - - t.pzh.__call__() - - logger.error.assert_called_once_with(ANY, ex) - t.client.restart.assert_called_once_with() - - class ServiceReferenceFactoryTest(TestCase): """Test the ServiceReferenceFactory class.""" diff --git a/Products/ZenHub/zenhub.py b/Products/ZenHub/zenhub.py index b8422229c4..e53441ea43 100755 --- a/Products/ZenHub/zenhub.py +++ b/Products/ZenHub/zenhub.py @@ -17,7 +17,6 @@ from time import time from twisted.internet import reactor, task -from twisted.internet.defer import inlineCallbacks from zope.component import getUtility, adapts, provideUtility from zope.event import notify from zope.interface import implementer @@ -63,7 +62,7 @@ from Products.ZenHub.server.config import ServerConfig -def _load_modules(): +def _import_modules(): # Due to the manipulation of sys.path during the loading of plugins, # we can get ObjectMap imported both as DataMaps.ObjectMap and the # full-path from Products. The following gets the class registered @@ -76,7 +75,8 @@ def _load_modules(): import DataMaps # noqa: F401 -_load_modules() +_import_modules() +del _import_modules log = logging.getLogger("zen.zenhub") @@ -107,8 +107,6 @@ class ZenHub(ZCmdBase): the work to a pool of zenhubworkers, running zenhubworker.py. zenhub manages these workers with 1 data structure: - workers - a list of remote PB instances - - TODO: document invalidation workers """ totalTime = 0.0 @@ -155,7 +153,6 @@ def __init__(self): # Invalidation Processing self._invalidation_manager = InvalidationManager( self.dmd, - self.log, self.async_syncdb, self.storage.poll_invalidations, self.sendEvent, @@ -261,18 +258,6 @@ def getRRDStats(self): self._getConf(), self.zem.sendEvent ) - # Legacy API - @inlineCallbacks - def processQueue(self): - """Periodically process database changes.""" - yield self._invalidation_manager.process_invalidations() - - # Legacy API - def _initialize_invalidation_filters(self): - self._invalidation_filters = ( - self._invalidation_manager.initialize_invalidation_filters() - ) - def sendEvent(self, **kw): """Post events to the EventManager. @@ -386,7 +371,7 @@ def buildOptions(self): @implementer(IHubConfProvider) -class DefaultConfProvider(object): # noqa: D101 +class DefaultConfProvider(object): adapts(ZenHub) def __init__(self, zenhub): @@ -400,7 +385,7 @@ def getHubConf(self): @implementer(IHubHeartBeatCheck) -class DefaultHubHeartBeatCheck(object): # noqa: D101 +class DefaultHubHeartBeatCheck(object): adapts(ZenHub) def __init__(self, zenhub): diff --git a/Products/ZenHub/zenhubclient.py b/Products/ZenHub/zenhubclient.py new file mode 100644 index 0000000000..95216874a3 --- /dev/null +++ b/Products/ZenHub/zenhubclient.py @@ -0,0 +1,335 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import collections +import logging + +from twisted.application.internet import backoffPolicy, ClientService +from twisted.internet import defer +from twisted.spread import pb + +from Products.ZenUtils.PBUtil import setKeepAlive + +from .errors import HubDown +from .server import ZenPBClientFactory + +log = logging.getLogger("zen.zenhubclient") + + +class ZenHubClient(object): + """ + A client for interacting with the ZenHub service. 
+ + After start is called, this class automatically handles connecting to + ZenHub, logging into ZenHub, and automatically reconnecting to ZenHub if + the connection to ZenHub is corrupted for any reason. + """ + + def __init__(self, app, endpoint, credentials, timeout, reactor=None): + """Initialize a ZenHubClient instance. + + :param app: Reference to the application object. + :type app: pb.Referenceable + :param endpoint: Where zenhub is found + :type endpoint: IStreamClientEndpoint + :param credentials: Credentials to log into ZenHub. + :type credentials: IUsernamePassword + :param float timeout: Seconds to wait before determining whether + ZenHub is unresponsive. + :type reactor: IReactorCore | None + """ + self._clientref = app + self._endpoint = endpoint + self._credentials = credentials + self._timeout = timeout + self._reactor = _get_reactor(reactor) + self._service = ClientService( + self._endpoint, + ZenPBClientFactory(), + retryPolicy=_getBackoffPolicy(initialDelay=0.5, factor=3.0), + prepareConnection=self._new_connection, + ) + self._connected_callbacks = [] + self._disconnected_callbacks = [] + self._zenhubref = None + self._instanceId = None + self._services = {} + + @property + def is_connected(self): + # type: () -> bool + """ + Returns True if there is a current connection to ZenHub. + """ + return self._zenhubref is not None + + @property + def instance_id(self): + # type: () -> str | None + """ + Return ZenHub's Control Center instance ID. + + The value is a string containing a number or is None. + """ + return self._instanceId + + @property + def services(self): + # type: () -> collections.Mapping[str, pb.Referenceable] + """ + Return the currently loaded ZenHub services. + + The return value is an immutable mapping of service names to service + references. + """ + return _FrozenDictProxy(self._services) + + def start(self): + # type: () -> defer.Deferred + """ + Start connecting to ZenHub. + + On a successful connection, the returned Deferred's callback is + invoked with the ZenHub broker instance. On failure, the errback + is invoked with the error. + + :rtype: defer.Deferred + """ + self._service.startService() + log.debug("started client service service=%r", self._service) + return self._service.whenConnected() + + def stop(self): + # type: () -> defer.Deferred + """ + Stop connecting to ZenHub. + + When the connection is closed, the returned Deferred is called. + + :rtype: defer.Deferred + """ + try: + return self._service.stopService() + finally: + self._reset() + + def notify_on_connect(self, f): + """ + Register a callable for invocation when a ZenHub connection has + been created or recreated. + """ + self._connected_callbacks.append(f) + + def notify_on_disconnect(self, f): + """ + Register a callable for invocation when ZenHub is disconnected. + + Once a callable has been invoked, it is removed from the set of + callables. It is recommended that callables for disconnect + notifications should be registered by the callables registered for + connection notifications. + """ + self._disconnected_callbacks.append(f) + + @defer.inlineCallbacks + def ping(self): + """ + If connected to ZenHub, 'pings' ZenHub. + + The response will be "pong" if successful. + """ + if self._zenhubref is None: + raise HubDown("not connected to ZenHub") + response = yield self._zenhubref.callRemote("ping") + defer.returnValue(response) + + @defer.inlineCallbacks + def register_worker(self, worker, instanceId, worklistId): + """ + Register the worker as a zenhubworker with ZenHub. 
+ + The worker is identified by `instanceId` and `worklistId`. + + @param worker: the worker that will accept RPC calls from ZenHub. + @type worker: pb.IReferenceable + @param instanceId: the worker's name + @type instanceId: str + @param worklistId: the 'queue' the worker accepts work from + @type worklistId: str + """ + if self._zenhubref is None: + raise HubDown("not connected to ZenHub") + yield self._zenhubref.callRemote( + "reportForWork", + worker, + name=instanceId, + worklistId=worklistId, + ) + + @defer.inlineCallbacks + def unregister_worker(self, instanceId, worklistId): + """ + Unregister the worker from ZenHub. + + The worker is identified by `instanceId` and `worklistId`. + + @param instanceId: the worker's name + @type instanceId: str + @param worklistId: the 'queue' the worker accepts work from + @type worklistId: str + """ + if self._zenhubref is None: + raise HubDown("not connected to ZenHub") + yield self._zenhubref.callRemote( + "resignFromWork", + name=instanceId, + worklistId=worklistId, + ) + + @defer.inlineCallbacks + def get_service(self, name, monitor, listener, options): + # type: (str, str, object, collections.Mapping) -> defer.Deferred + """ + Return a reference to the named ZenHub service. + + :param name: Name of the service + :param monitor: Name of the collector + :param listener: Object reference to caller + :param options: key/value data relevant to the service + """ + if self._zenhubref is None: + raise HubDown("not connected to ZenHub") + + if name in self._services: + defer.returnValue(self._services[name]) + + service_ref = yield self._zenhubref.callRemote( + "getService", name, monitor, listener, options + ) + self._services[name] = service_ref + log.debug( + "retrieved remote reference to ZenHub service " + "name=%s collector=%s service=%r", + name, + monitor, + service_ref, + ) + defer.returnValue(service_ref) + + @defer.inlineCallbacks + def _new_connection(self, broker): + log.debug("connected to ZenHub broker=%r", broker) + try: + if hasattr(broker.transport, "socket"): + setKeepAlive(broker.transport.socket) + else: + log.warning("broker.transport.socket attribute is missing") + + self._zenhubref = yield self._login(broker) + + self._instanceId = yield self._zenhubref.callRemote( + "getHubInstanceId" + ) + except defer.CancelledError: + log.error("timed out trying to login to ZenHub") + raise + except pb.RemoteError as ex: + log.error( + "login rejected by ZenHub error=%s message=%s", + ex.remoteType, + ex.args[0] if ex.args else "", + ) + raise + except Exception: + log.exception("unexpected error communicating with ZenHub") + raise + else: + log.info("connected to ZenHub instance-id=%s", self._instanceId) + # Connection complete; install a listener to be notified if + # the connection is lost. 
+ broker.notifyOnDisconnect(self._disconnected) + + log.debug( + "calling %d on-connect callbacks", + len(self._connected_callbacks), + ) + for callback in self._connected_callbacks: + try: + yield defer.maybeDeferred(callback) + except Exception: + log.exception( + "connect callback error callback=%r", callback + ) + + def _login(self, broker): + d = broker.factory.login(self._credentials, self._clientref) + timeoutCall = self._reactor.callLater(self._timeout, d.cancel) + + def completedLogin(arg): + if timeoutCall.active(): + timeoutCall.cancel() + return arg + + d.addBoth(completedLogin) + return d + + def _disconnected(self): + logmethod = log.warning if self._service.running else log.info + logmethod("disconnected from ZenHub") + self._reset() + while len(self._disconnected_callbacks): + callback = self._disconnected_callbacks.pop(0) + try: + callback() + except Exception: + log.exception( + "disconnect callback error callback=%r", callback + ) + + def _reset(self): + self._zenhubref = None + self._services.clear() + + +def _get_reactor(reactor): + if reactor is None: + from twisted.internet import reactor as global_reactor + + return global_reactor + else: + return reactor + + +class _FrozenDictProxy(collections.Mapping): + def __init__(self, data): + self.__data = data + + def __getitem__(self, key): + return self.__data[key] + + def __contains__(self, key): + return key in self.__data + + def __len__(self): + return len(self.__data) + + def __iter__(self): + return iter(self.__data) + + +def _getBackoffPolicy(*args, **kw): + policy = backoffPolicy(*args, **kw) + + def _policy(attempt): + log.info( + "no connection to ZenHub; is ZenHub running? attempt=%s", attempt + ) + return policy(attempt) + + return _policy diff --git a/Products/ZenHub/zenhubworker.py b/Products/ZenHub/zenhubworker.py index 3fa6f9d99d..ec669631ae 100755 --- a/Products/ZenHub/zenhubworker.py +++ b/Products/ZenHub/zenhubworker.py @@ -10,49 +10,60 @@ from __future__ import absolute_import +import json import logging -import os import signal import time +import types from collections import defaultdict from contextlib import contextmanager from optparse import SUPPRESS_HELP, OptParseError from metrology import Metrology -from twisted.application.internet import ClientService, backoffPolicy from twisted.cred.credentials import UsernamePassword -from twisted.internet.endpoints import clientFromString -from twisted.internet import defer, reactor, error, task +from twisted.internet import defer, reactor, error +from twisted.internet.endpoints import clientFromString, serverFromString from twisted.spread import pb +from twisted.web._responses import INTERNAL_SERVER_ERROR from zope.component import getGlobalSiteManager import Products.ZenHub as ZENHUB_MODULE from Products.DataCollector.Plugins import loadPlugins from Products.ZenHub import PB_PORT +from Products.ZenHub.localserver import ( + ErrorResponse, + LocalServer, + ZenHubStatus, + ZenResource, +) from Products.ZenHub.metricmanager import MetricManager, IMetricManager from Products.ZenHub.server import ( ServiceLoader, ServiceManager, ServiceRegistry, UnknownServiceError, - ZenPBClientFactory, ) -from Products.ZenHub.PBDaemon import RemoteBadMonitor +from Products.ZenHub.errors import RemoteBadMonitor +from Products.ZenHub.pinger import PingZenHub +from Products.ZenHub.zenhubclient import ZenHubClient from Products.ZenUtils.debugtools import ContinuousProfiler -from Products.ZenUtils.PBUtil import setKeepAlive from Products.ZenUtils.Time import isoDateTime -from 
Products.ZenUtils.Utils import zenPath, atomicWrite, load_config +from Products.ZenUtils.Utils import load_config from Products.ZenUtils.ZCmdBase import ZCmdBase + IDLE = "None/None" def getLogger(obj): """Return a logger based on the name of the given class.""" - cls = type(obj) - name = "zen.zenhubworker.%s" % (cls.__name__) + if isinstance(obj, types.InstanceType): + name = obj.__class__.__name__ + else: + name = type(obj).__name__ + name = "zen.zenhubworker.%s" % (name.lower()) return logging.getLogger(name) @@ -97,13 +108,28 @@ def __init__(self, reactor): ) endpoint = clientFromString(reactor, endpointDescriptor) self.__client = ZenHubClient( - reactor, + self, endpoint, creds, - self, self.options.hub_response_timeout, - self.worklistId, + reactor, + ) + self.__client.notify_on_connect(self._register_worker) + + # Configure/initialize the zenhub pinger + self.__pinger = PingZenHub(self.__client) + + # bind the server to the localhost interface so only local + # connections can be established. + server_endpoint_descriptor = "tcp:{port}:interface=127.0.0.1".format( + port=self.options.localport ) + server_endpoint = serverFromString(reactor, server_endpoint_descriptor) + self.__server = LocalServer(reactor, server_endpoint) + self.__server.add_resource( + "zenhub", ZenHubStatus(lambda: self.getZenHubStatus()) + ) + self.__server.add_resource("stats", _ZenHubWorkerStats(self)) # Setup Metric Reporting self.log.debug("Creating async MetricReporter") @@ -133,6 +159,16 @@ def start(self): "before", "shutdown", self.__client.stop ) + self.__pinger.start() + self.__reactor.addSystemEventTrigger( + "before", "shutdown", self.__pinger.stop + ) + + self.__server.start() + self.__reactor.addSystemEventTrigger( + "before", "shutdown", self.__server.stop + ) + self._metric_manager.start() self.__reactor.addSystemEventTrigger( "before", "shutdown", self._metric_manager.stop @@ -186,15 +222,30 @@ def sighandler_USR1(self, signum, frame): if self.options.profiling: self.profiler.dump_stats() super(ZenHubWorker, self).sighandler_USR1(signum, frame) - except Exception: - pass + except Exception as ex: + self.log.warning("error while handling a USR1 signal: %s", ex) def sighandler_USR2(self, *args): """Handle USR2 signals.""" try: self.reportStats() - except Exception: - pass + except Exception as ex: + self.log.warning("error while reporting statistics: %s", ex) + + @defer.inlineCallbacks + def _register_worker(self): + try: + yield self.__client.register_worker( + self, self.instanceId, self.worklistId + ) + except Exception as ex: + self.log.error( + "failed to register zenhubworker with zenhub " + "error=%s instance-id=%s worklist-id=%s", + ex, + self.instanceId, + self.worklistId, + ) def _work_started(self, startTime): self.currentStart = startTime @@ -206,29 +257,58 @@ def _work_finished(self, duration, method): self.currentStart = 0 if self.numCalls.count >= self.options.call_limit: self.log.info( - "Call limit of %s reached, " - "proceeding to shutdown (and restart)", + "Call limit of %s reached, proceeding to shutdown", self.options.call_limit, ) self.__reactor.callLater(0, self._shutdown) + def getZenHubStatus(self): + return "connected" if self.__client.is_connected else "disconnected" + + def getStats(self): + results = {"current": self.current} + if self.current != IDLE: + results["current.elapsed"] = time.time() - self.currentStart + + if self.__registry: + sorted_data = sorted( + self.__registry.iteritems(), + key=lambda kv: kv[0][1].rpartition(".")[-1], + ) + summarized_stats = [] + 
for (_, svc), svcob in sorted_data: + svc = "%s" % svc.rpartition(".")[-1] + for method, stats in sorted(svcob.callStats.items()): + summarized_stats.append( + { + "service": svc, + "method": method, + "count": stats.numoccurrences, + "total": stats.totaltime, + "average": stats.totaltime / stats.numoccurrences + if stats.numoccurrences + else 0.0, + "last-run": isoDateTime(stats.lasttime), + } + ) + results["statistics"] = summarized_stats + + return results + def reportStats(self): """Write zenhubworker's current statistics to the log.""" - now = time.time() - if self.current != IDLE: + stats = self.getStats() + if stats["current"] != IDLE: self.log.info( "Currently performing %s, elapsed %.2f s", - self.current, - now - self.currentStart, + stats["current"], + stats["current.elapsed"], ) else: self.log.info("Currently IDLE") - if self.__registry: + statistics = stats.get("statistics") + if statistics: loglines = ["Running statistics:"] - sorted_data = sorted( - self.__registry.iteritems(), - key=lambda kv: kv[0][1].rpartition(".")[-1], - ) loglines.append( " %-50s %-32s %8s %12s %8s %s" % ( @@ -240,22 +320,18 @@ def reportStats(self): "Last Run", ) ) - for (_, svc), svcob in sorted_data: - svc = "%s" % svc.rpartition(".")[-1] - for method, stats in sorted(svcob.callStats.items()): - loglines.append( - " - %-48s %-32s %8d %12.2f %8.2f %s" - % ( - svc, - method, - stats.numoccurrences, - stats.totaltime, - stats.totaltime / stats.numoccurrences - if stats.numoccurrences - else 0.0, - isoDateTime(stats.lasttime), - ), - ) + for entry in statistics: + loglines.append( + " - %-48s %-32s %8d %12.2f %8.2f %s" + % ( + entry["service"], + entry["method"], + entry["count"], + entry["total"], + entry["average"], + entry["last-run"], + ), + ) self.log.info("\n".join(loglines)) else: self.log.info("no service activity statistics") @@ -294,9 +370,19 @@ def remote_ping(self): Used by ZenHub to determine whether zenhubworker is still active. """ + if self.numCalls.count >= self.options.call_limit: + raise pb.PBConnectionLost("restarting due to call limit") return "pong" + @defer.inlineCallbacks def _shutdown(self): + self.log.info("disconnecting from zenhub") + try: + yield self.__client.unregister_worker( + self.instanceId, self.worklistId + ) + except Exception as ex: + self.log.error("error while unregistering from zenhub: %s", ex) self.log.info("Shutting down") try: self.__reactor.stop() @@ -306,6 +392,7 @@ def _shutdown(self): def buildOptions(self): """Add optparse options to the options parser.""" ZCmdBase.buildOptions(self) + LocalServer.buildOptions(self.parser) self.parser.add_option( "--hubhost", dest="hubhost", @@ -368,216 +455,22 @@ def buildOptions(self): ) -class ZenHubClient(object): - """A client for connecting to ZenHub as a ZenHub Worker. - - After start is called, this class automatically handles connecting to - ZenHub, registering the zenhubworker with ZenHub, and automatically - reconnecting to ZenHub if the connection to ZenHub is corrupted for - any reason. - """ - - def __init__( - self, - reactor, - endpoint, - credentials, - worker, - timeout, - worklistId, - ): - """Initialize a ZenHubClient instance. - - :type reactor: IReactorCore - :param endpoint: Where zenhub is found - :type endpoint: IStreamClientEndpoint - :param credentials: Credentials to log into ZenHub. - :type credentials: IUsernamePassword - :param worker: Reference to worker - :type worker: IReferenceable - :param float timeout: Seconds to wait before determining whether - ZenHub is unresponsive. 
- :param str worklistId: Name of the worklist to receive tasks from. - """ - self.__reactor = reactor - self.__endpoint = endpoint - self.__credentials = credentials - self.__worker = worker - self.__timeout = timeout - self.__worklistId = worklistId - - self.__stopping = False - self.__pinger = None - self.__service = None - - self.__log = getLogger(self) - self.__signalFile = ConnectedToZenHubSignalFile() - - def start(self): - """Start connecting to ZenHub.""" - self.__stopping = False - factory = ZenPBClientFactory() - self.__service = ClientService( - self.__endpoint, - factory, - retryPolicy=backoffPolicy(initialDelay=0.5, factor=3.0), - ) - self.__service.startService() - self.__prepForConnection() - - def stop(self): - """Stop connecting to ZenHub.""" - self.__stopping = True - self.__reset() - - def restart(self): - """Restart the connect to ZenHub.""" - self.__reset() - self.start() - - def __reset(self): - if self.__pinger: - self.__pinger.stop() - self.__pinger = None - if self.__service: - self.__service.stopService() - self.__service = None - self.__signalFile.remove() - - def __prepForConnection(self): - if not self.__stopping: - self.__log.info("Prepping for connection") - self.__service.whenConnected().addCallbacks( - self.__connected, self.__notConnected - ) - - def __disconnected(self, *args): - # Called when the connection to ZenHub is lost. - # Ensures that processing resumes when the connection to ZenHub - # is restored. - self.__log.info( - "Lost connection to ZenHub: %s", - args[0] if args else "", - ) - if self.__pinger: - self.__pinger.stop() - self.__pinger = None - self.__signalFile.remove() - self.__prepForConnection() - - def __notConnected(self, *args): - self.__log.info("Not connected! %r", args) - - @defer.inlineCallbacks - def __connected(self, broker): - # Called when a connection to ZenHub is established. - # Logs into ZenHub and passes up a worker reference for ZenHub - # to use to dispatch method calls. - - # Sometimes broker.transport doesn't have a 'socket' attribute - if not hasattr(broker.transport, "socket"): - self.restart() - defer.returnValue(None) +class _ZenHubWorkerStats(ZenResource): + def __init__(self, worker): + ZenResource.__init__(self) + self._worker = worker - self.__log.info("Connection to ZenHub established") + def render_GET(self, request): try: - setKeepAlive(broker.transport.socket) - - zenhub = yield self.__login(broker) - yield zenhub.callRemote( - "reportingForWork", - self.__worker, - workerId=self.__worker.instanceId, - worklistId=self.__worklistId, - ) - - ping = PingZenHub(zenhub, self) - self.__pinger = task.LoopingCall(ping) - d = self.__pinger.start(self.__timeout, now=False) - d.addErrback(self.__pingFail) # Catch and pass on errors - except defer.CancelledError: - self.__log.error("Timed out trying to login to ZenHub") - self.restart() - defer.returnValue(None) - except Exception as ex: - self.__log.error( - "Unable to report for work: (%s) %s", type(ex).__name__, ex + request.responseHeaders.addRawHeader( + b"content-type", b"application/json; charset=utf-8" ) - self.__signalFile.remove() - self.__reactor.stop() - else: - self.__log.info("Logged into ZenHub") - self.__signalFile.touch() - - # Connection complete; install a listener to be notified if - # the connection is lost. 
- broker.notifyOnDisconnect(self.__disconnected) - - def __login(self, broker): - d = broker.factory.login(self.__credentials, self.__worker) - timeoutCall = self.__reactor.callLater(self.__timeout, d.cancel) - - def completedLogin(arg): - if timeoutCall.active(): - timeoutCall.cancel() - return arg - - d.addBoth(completedLogin) - return d - - def __pingFail(self, ex): - self.__log.error("Pinger failed: %s", ex) - - -class PingZenHub(object): - """Simple task to ping ZenHub. - - PingZenHub's real purpose is to allow the ZenHubWorker to detect when - ZenHub is no longer responsive (for whatever reason). - """ - - def __init__(self, zenhub, client): - """Initialize a PingZenHub instance.""" - self.__zenhub = zenhub - self.__client = client - self.__log = getLogger(self) - - @defer.inlineCallbacks - def __call__(self): - """Ping zenhub. - - If the ping fails, causes the connection to ZenHub to reset. - """ - self.__log.debug("Pinging zenhub") - try: - response = yield self.__zenhub.callRemote("ping") - self.__log.debug("Pinged zenhub: %s", response) - except Exception as ex: - self.__log.error("Ping failed: %s", ex) - self.__client.restart() - - -class ConnectedToZenHubSignalFile(object): - """Manages a file that indicates successful connection to ZenHub.""" - - def __init__(self): - """Initialize a ConnectedToZenHubSignalFile instance.""" - filename = "zenhub_connected" - self.__signalFilePath = zenPath("var", filename) - self.__log = getLogger(self) - - def touch(self): - """Create the file.""" - atomicWrite(self.__signalFilePath, "") - self.__log.debug("Created file '%s'", self.__signalFilePath) - - def remove(self): - """Delete the file.""" - try: - os.remove(self.__signalFilePath) + return json.dumps(self._worker.getStats()) except Exception: - pass - self.__log.debug("Removed file '%s'", self.__signalFilePath) + self.log.exception("failed to get zenhubworker stats") + return ErrorResponse( + INTERNAL_SERVER_ERROR, "zenhubworker statistics unavailable" + ) class ServiceReferenceFactory(object): diff --git a/Products/ZenHub/zodb.py b/Products/ZenHub/zodb.py index f47a1ebab5..5ed96d9ce3 100644 --- a/Products/ZenHub/zodb.py +++ b/Products/ZenHub/zodb.py @@ -11,8 +11,8 @@ from zope.component import provideHandler from zope.component.interfaces import ObjectEvent +from zope.interface import implementer from zope.interface.advice import addClassAdvisor -from zope.interface import implements from .interfaces import IUpdateEvent, IDeletionEvent @@ -25,12 +25,14 @@ def __init__(self, object, oid): self.oid = oid +@implementer(IUpdateEvent) class UpdateEvent(InvalidationEvent): - implements(IUpdateEvent) + pass +@implementer(IDeletionEvent) class DeletionEvent(InvalidationEvent): - implements(IDeletionEvent) + pass def _listener_decorator_factory(eventtype): diff --git a/Products/ZenMessaging/ChangeEvents/events.py b/Products/ZenMessaging/ChangeEvents/events.py index d5d6d0f71f..ba966ef204 100644 --- a/Products/ZenMessaging/ChangeEvents/events.py +++ b/Products/ZenMessaging/ChangeEvents/events.py @@ -82,12 +82,12 @@ def __init__(self, msgs, maintWindowChanges, refs=None): self.maintWindowChanges = maintWindowChanges +@implementer(IMessagePostPublishingEvent) class MessagePostPublishingEvent(object): """ Fired after transaction completion. 
""" - implementer(IMessagePostPublishingEvent) def __init__(self, refs=None): self.refs = refs diff --git a/Products/ZenMessaging/queuemessaging/adapters.py b/Products/ZenMessaging/queuemessaging/adapters.py index 405cfffeb6..39be82b372 100644 --- a/Products/ZenMessaging/queuemessaging/adapters.py +++ b/Products/ZenMessaging/queuemessaging/adapters.py @@ -9,7 +9,7 @@ from zenoss.protocols.protobufs import zep_pb2 as eventConstants from zenoss.protocols.protobufs import model_pb2 as modelConstants -from zope.interface import implements +from zope.interface import implementer from Products.ZenEvents.events2.proxy import EventProxy from Products.ZenMessaging.queuemessaging.interfaces import ( @@ -61,12 +61,12 @@ def autoMapFields(self, proto): continue +@implementer(IModelProtobufSerializer) class DeviceProtobuf(ObjectProtobuf): """ Fills up the properties of a device protobuf. """ - implements(IModelProtobufSerializer) @property def modelType(self): @@ -78,12 +78,12 @@ def fill(self, proto): return proto +@implementer(IModelProtobufSerializer) class OrganizerProtobuf(ObjectProtobuf): """ Fills up the properties of an organizer protobuf. """ - implements(IModelProtobufSerializer) @property def modelType(self): @@ -97,12 +97,12 @@ def fill(self, proto): return proto +@implementer(IModelProtobufSerializer) class DeviceComponentProtobuf(ObjectProtobuf): """ Fills up the properties of a Device Component """ - implements(IModelProtobufSerializer) @property def modelType(self): @@ -309,12 +309,12 @@ def mapEvent(self, proto, value): proto.details.add(name=self._detailName, value=[value]) +@implementer(IProtobufSerializer) class EventProtobuf(ObjectProtobuf): """ Fills up the properties of an event """ - implements(IProtobufSerializer) # event property, protobuf property _FIELD_MAPPERS = { @@ -380,10 +380,11 @@ def __init__(self, obj): ObjectProtobuf.__init__(self, obj) def addDetail(self, proto, name, value): - isIterable = lambda x : hasattr(x, '__iter__') detail = proto.details.add() detail.name = name - if isIterable(value): + # Test whether 'value' is iterable. + # Avoids strings because strings don't have an __iter__ method. 
+ if hasattr(value, "__iter__"): for v in value: detail.value.append(_safestr(v)) else: diff --git a/Products/ZenMessaging/queuemessaging/publisher.py b/Products/ZenMessaging/queuemessaging/publisher.py index ce70512ccc..06d2d1b9cd 100644 --- a/Products/ZenMessaging/queuemessaging/publisher.py +++ b/Products/ZenMessaging/queuemessaging/publisher.py @@ -281,6 +281,7 @@ def _getPrepublishingTimer(): class PublishSynchronizer(object): _queuePublisher = None + _postPublishingEventArgs = () def findNonImpactingEvents(self, events): """ diff --git a/Products/ZenModel/Device.py b/Products/ZenModel/Device.py index ee8b8f129d..21fe872b01 100644 --- a/Products/ZenModel/Device.py +++ b/Products/ZenModel/Device.py @@ -38,7 +38,7 @@ from ZODB.POSException import POSError from zope.component import subscribers from zope.event import notify -from zope.interface import implements +from zope.interface import implementer from Products.Jobber.jobs import FacadeMethodJob from Products.PluginIndexes.FieldIndex.FieldIndex import FieldIndex @@ -50,7 +50,8 @@ from Products.ZenUtils import NetworkTree, Time from Products.ZenUtils.deprecated import deprecated from Products.ZenUtils.guid.interfaces import ( - IGloballyIdentifiable, IGlobalIdentifier, + IGlobalIdentifier, + IGloballyIdentifiable, ) from Products.ZenUtils.IpUtil import ( checkip, @@ -67,22 +68,26 @@ makeMultiPathIndex, ) from Products.ZenUtils.Utils import ( - edgesToXML, getObjectsFromCatalog, isXmlRpc, unpublished, unused, + edgesToXML, + getObjectsFromCatalog, + isXmlRpc, + unpublished, + unused, ) from Products.ZenWidgets import messaging from Products.ZenWidgets.interfaces import IMessageSender +from Products.Zuul import getFacade from Products.Zuul.catalog.events import IndexingEvent from Products.Zuul.catalog.indexable import DeviceIndexable from Products.Zuul.catalog.interfaces import IModelCatalogTool -from Products.Zuul import getFacade from .AdministrativeRoleable import AdministrativeRoleable from .Commandable import Commandable from .DeviceHW import DeviceHW from .EventView import IEventView from .Exceptions import DeviceExistsError, NoSnmp -from .Lockable import Lockable from .interfaces import IExpandedLinkProvider +from .Lockable import Lockable from .MaintenanceWindowable import MaintenanceWindowable from .ManagedEntity import ManagedEntity from .OperatingSystem import OperatingSystem @@ -109,26 +114,42 @@ "COMMAND": "zencommand", } -log = logging.getLogger("zen.Device") +log = logging.getLogger("zen.model.device") def getNetworkRoot(context, performanceMonitor): """ Return the default network root. 
""" - return context.getDmdRoot('Networks') - - -def manage_createDevice(context, deviceName, devicePath="/Discovered", - tag="", serialNumber="", - zSnmpCommunity="", zSnmpPort=161, zSnmpVer="", - rackSlot="", productionState=DEFAULT_PRODSTATE, comments="", - hwManufacturer="", hwProductName="", - osManufacturer="", osProductName="", - locationPath="", groupPaths=[], systemPaths=[], - performanceMonitor="localhost", - discoverProto="snmp", priority=3, manageIp="", - zProperties=None, title=None): + return context.getDmdRoot("Networks") + + +def manage_createDevice( + context, + deviceName, + devicePath="/Discovered", + tag="", + serialNumber="", + zSnmpCommunity="", + zSnmpPort=161, + zSnmpVer="", + rackSlot="", + productionState=DEFAULT_PRODSTATE, + comments="", + hwManufacturer="", + hwProductName="", + osManufacturer="", + osProductName="", + locationPath="", + groupPaths=[], + systemPaths=[], + performanceMonitor="localhost", + discoverProto="snmp", + priority=3, + manageIp="", + zProperties=None, + title=None, +): """ Device factory creates a device and sets up its relations and collects its configuration. SNMP Community discovery also happens here. If an IP is @@ -137,46 +158,72 @@ def manage_createDevice(context, deviceName, devicePath="/Discovered", @rtype: Device """ - manageIp = manageIp.replace(' ', '') + manageIp = manageIp.replace(" ", "") deviceName = context.prepId(deviceName) - log.info("device name '%s' for ip '%s'", deviceName, manageIp) deviceClass = context.getDmdRoot("Devices").createOrganizer(devicePath) - device = deviceClass.createInstance(deviceName, performanceMonitor, manageIp) + device = deviceClass.createInstance( + deviceName, performanceMonitor, manageIp + ) device.setPerformanceMonitor(performanceMonitor) device.setManageIp(manageIp) device.manage_editDevice( - tag, serialNumber, - zSnmpCommunity, zSnmpPort, zSnmpVer, - rackSlot, productionState, comments, - hwManufacturer, hwProductName, - osManufacturer, osProductName, - locationPath, groupPaths, systemPaths, - performanceMonitor, priority, zProperties, - title) + tag, + serialNumber, + zSnmpCommunity, + zSnmpPort, + zSnmpVer, + rackSlot, + productionState, + comments, + hwManufacturer, + hwProductName, + osManufacturer, + osProductName, + locationPath, + groupPaths, + systemPaths, + performanceMonitor, + priority, + zProperties, + title, + ) + log.info( + "created device name=%s manageIp=%s collector=%s class=%s", + deviceName, + manageIp, + performanceMonitor, + devicePath, + ) return device -def findCommunity(context, ip, devicePath, - community="", port=None, version=None): +def findCommunity( + context, ip, devicePath, community="", port=None, version=None +): """ - Find the SNMP community and version for an IP address using zSnmpCommunities. + Find the SNMP community and version for an IP address using + zSnmpCommunities. 
@rtype: tuple of (community, port, version, device name) """ from pynetsnmp.SnmpSession import SnmpSession - devroot = context.getDmdRoot('Devices').createOrganizer(devicePath) + devroot = context.getDmdRoot("Devices").createOrganizer(devicePath) communities = [] - if community: communities.append(community) + if community: + communities.append(community) communities.extend(getattr(devroot, "zSnmpCommunities", [])) - if not port: port = getattr(devroot, "zSnmpPort", 161) - versions = ('v2c', 'v1') - if not version: version = getattr(devroot, 'zSnmpVer', None) - if version: versions = (version,) + if not port: + port = getattr(devroot, "zSnmpPort", 161) + versions = ("v2c", "v1") + if not version: + version = getattr(devroot, "zSnmpVer", None) + if version: + versions = (version,) timeout = getattr(devroot, "zSnmpTimeout", 2) retries = getattr(devroot, "zSnmpTries", 2) session = SnmpSession(ip, timeout=timeout, port=port, retries=retries) - oid = '.1.3.6.1.2.1.1.5.0' + oid = ".1.3.6.1.2.1.1.5.0" goodcommunity = "" goodversion = "" devname = "" @@ -189,10 +236,12 @@ def findCommunity(context, ip, devicePath, goodcommunity = session.community goodversion = version break - except POSError: raise - except Exception: pass #keep trying until we run out + except POSError: + raise + except Exception: + pass # keep trying until we run out if goodcommunity: - break + break else: raise NoSnmp("No SNMP found for IP = %s" % ip) return (goodcommunity, port, goodversion, devname) @@ -206,40 +255,40 @@ def manage_addDevice(context, id, REQUEST=None): serv = Device(id) context._setObject(serv.id, serv) if REQUEST is not None: - # TODO: there is no "self"! Fix UI feedback code. - #messaging.IMessageSender(self).sendToBrowser( - # 'Device Added', - # 'Device %s has been created.' % id - #) - - # TODO: test this audits correctly. How is this called? - #uid = context._getOb(serv.id).getPrimaryId() - audit('UI.Device.Add', serv, deviceClass=context) - REQUEST['RESPONSE'].redirect(context.absolute_url_path()+'/manage_main') + audit("UI.Device.Add", serv, deviceClass=context) + REQUEST["RESPONSE"].redirect( + context.absolute_url_path() + "/manage_main" + ) -addDevice = DTMLFile('dtml/addDevice',globals()) +addDevice = DTMLFile("dtml/addDevice", globals()) class NoNetMask(Exception): pass -class Device(ManagedEntity, Commandable, Lockable, MaintenanceWindowable, - AdministrativeRoleable, ZenMenuable, DeviceIndexable): +@implementer(IEventView, IGloballyIdentifiable) +class Device( + ManagedEntity, + Commandable, + Lockable, + MaintenanceWindowable, + AdministrativeRoleable, + ZenMenuable, + DeviceIndexable, +): """ Device is a base class that represents the idea of a single computer system that is made up of software running on hardware. It currently must be IP enabled but maybe this will change. 
""" - implements(IEventView, IGloballyIdentifiable) - - event_key = portal_type = meta_type = 'Device' + event_key = portal_type = meta_type = "Device" default_catalog = "deviceSearch" - relationshipManagerPathRestriction = '/Devices' + relationshipManagerPathRestriction = "/Devices" title = "" manageIp = "" snmpAgent = "" @@ -262,70 +311,87 @@ class Device(ManagedEntity, Commandable, Lockable, MaintenanceWindowable, _temp_device = False _properties = ManagedEntity._properties + ( - {'id':'title', 'type':'string', 'mode':'w'}, - {'id':'manageIp', 'type':'string', 'mode':'w'}, - {'id':'snmpAgent', 'type':'string', 'mode':'w'}, - {'id':'snmpDescr', 'type':'string', 'mode':''}, - {'id':'snmpOid', 'type':'string', 'mode':''}, - {'id':'snmpContact', 'type':'string', 'mode':''}, - {'id':'snmpSysName', 'type':'string', 'mode':''}, - {'id':'snmpLocation', 'type':'string', 'mode':''}, - {'id':'snmpLastCollection', 'type':'date', 'mode':''}, - {'id':'snmpAgent', 'type':'string', 'mode':''}, - {'id':'rackSlot', 'type':'string', 'mode':'w'}, - {'id':'comments', 'type':'text', 'mode':'w'}, - {'id':'sysedgeLicenseMode', 'type':'string', 'mode':''}, - {'id':'priority', 'type':'int', 'mode':'w'}, - ) + {"id": "title", "type": "string", "mode": "w"}, + {"id": "manageIp", "type": "string", "mode": "w"}, + {"id": "snmpAgent", "type": "string", "mode": "w"}, + {"id": "snmpDescr", "type": "string", "mode": ""}, + {"id": "snmpOid", "type": "string", "mode": ""}, + {"id": "snmpContact", "type": "string", "mode": ""}, + {"id": "snmpSysName", "type": "string", "mode": ""}, + {"id": "snmpLocation", "type": "string", "mode": ""}, + {"id": "snmpLastCollection", "type": "date", "mode": ""}, + {"id": "snmpAgent", "type": "string", "mode": ""}, + {"id": "rackSlot", "type": "string", "mode": "w"}, + {"id": "comments", "type": "text", "mode": "w"}, + {"id": "sysedgeLicenseMode", "type": "string", "mode": ""}, + {"id": "priority", "type": "int", "mode": "w"}, + ) _relations = ManagedEntity._relations + ( - ("deviceClass", ToOne(ToManyCont, "Products.ZenModel.DeviceClass", - "devices")), - ("perfServer", ToOne(ToMany, "Products.ZenModel.PerformanceConf", - "devices")), + ( + "deviceClass", + ToOne(ToManyCont, "Products.ZenModel.DeviceClass", "devices"), + ), + ( + "perfServer", + ToOne(ToMany, "Products.ZenModel.PerformanceConf", "devices"), + ), ("location", ToOne(ToMany, "Products.ZenModel.Location", "devices")), ("systems", ToMany(ToMany, "Products.ZenModel.System", "devices")), ("groups", ToMany(ToMany, "Products.ZenModel.DeviceGroup", "devices")), - ("adminRoles", ToManyCont(ToOne,"Products.ZenModel.AdministrativeRole", - "managedObject")), - ('userCommands', ToManyCont(ToOne, 'Products.ZenModel.UserCommand', - 'commandable')), - ("ipaddress", ToOne(ToOne, "Products.ZenModel.IpAddress", "manageDevice")), + ( + "adminRoles", + ToManyCont( + ToOne, "Products.ZenModel.AdministrativeRole", "managedObject" + ), + ), + ( + "userCommands", + ToManyCont(ToOne, "Products.ZenModel.UserCommand", "commandable"), + ), + ( + "ipaddress", + ToOne(ToOne, "Products.ZenModel.IpAddress", "manageDevice"), + ), # unused: - ('monitors', ToMany(ToMany, 'Products.ZenModel.StatusMonitorConf', - 'devices')), - ) + ( + "monitors", + ToMany(ToMany, "Products.ZenModel.StatusMonitorConf", "devices"), + ), + ) # Screen action bindings (and tab definitions) factory_type_information = ( { - 'id' : 'Device', - 'meta_type' : 'Device', - 'description' : """Base class for all devices""", - 'icon' : 'Device_icon.gif', - 'product' : 'ZenModel', - 'factory' 
: 'manage_addDevice', - 'immediate_view' : 'devicedetail', - 'actions' : - ( - { 'id' : 'events' - , 'name' : 'Events' - , 'action' : 'viewEvents' - , 'permissions' : (ZEN_VIEW, ) + "id": "Device", + "meta_type": "Device", + "description": """Base class for all devices""", + "icon": "Device_icon.gif", + "product": "ZenModel", + "factory": "manage_addDevice", + "immediate_view": "devicedetail", + "actions": ( + { + "id": "events", + "name": "Events", + "action": "viewEvents", + "permissions": (ZEN_VIEW,), }, - { 'id' : 'perfServer' - , 'name' : 'Graphs' - , 'action' : 'viewDevicePerformance' - , 'permissions' : (ZEN_VIEW, ) + { + "id": "perfServer", + "name": "Graphs", + "action": "viewDevicePerformance", + "permissions": (ZEN_VIEW,), }, - { 'id' : 'edit' - , 'name' : 'Edit' - , 'action' : 'editDevice' - , 'permissions' : ("Change Device",) + { + "id": "edit", + "name": "Edit", + "action": "editDevice", + "permissions": ("Change Device",), }, - ) - }, - ) + ), + }, + ) security = ClassSecurityInfo() @@ -338,7 +404,7 @@ def __init__(self, id, buildRelations=True): self._setObject(osObj.id, osObj) hw = DeviceHW() self._setObject(hw.id, hw) - #self.commandStatus = "Not Tested" + # self.commandStatus = "Not Tested" self._lastPollSnmpUpTime = ZenStatus(0) self._snmpLastCollection = 0 self._lastChange = 0 @@ -351,7 +417,7 @@ def resetProductionState(self): self.setPreMWProductionState(DEFAULT_PRODSTATE) def isTempDevice(self): - flag = getattr(self, '_temp_device', None) + flag = getattr(self, "_temp_device", None) if flag is None: flag = self._temp_device = False return flag @@ -362,7 +428,8 @@ def name(self): """ return self.titleOrId() - security.declareProtected(ZEN_MANAGE_DMD, 'changeDeviceClass') + security.declareProtected(ZEN_MANAGE_DMD, "changeDeviceClass") + def changeDeviceClass(self, deviceClassPath, REQUEST=None): """ Wrapper for DeviceClass.moveDevices. 
The primary reason to use this @@ -375,18 +442,22 @@ def changeDeviceClass(self, deviceClassPath, REQUEST=None): @type REQUEST: Zope REQUEST object """ self.deviceClass().moveDevices(deviceClassPath, (self.id,)) - device = self.getDmdRoot('Devices').findDevice(self.id) + device = self.getDmdRoot("Devices").findDevice(self.id) if REQUEST: - audit('UI.Device.ChangeDeviceClass', self, deviceClass=deviceClassPath) + audit( + "UI.Device.ChangeDeviceClass", + self, + deviceClass=deviceClassPath, + ) return device.absolute_url_path() + @deprecated def getRRDTemplate(self): - """ - DEPRECATED - """ import warnings - warnings.warn('Device.getRRDTemplate is deprecated', - DeprecationWarning) + + warnings.warn( + "Device.getRRDTemplate is deprecated", DeprecationWarning + ) return ManagedEntity.getRRDTemplate(self) def getRRDTemplates(self): @@ -394,20 +465,36 @@ def getRRDTemplates(self): Returns all the templates bound to this Device @rtype: list - - >>> from Products.ZenModel.Device import manage_addDevice - >>> manage_addDevice(devices, 'test') - >>> devices.test.getRRDTemplates() - [] """ - if not hasattr(self, 'zDeviceTemplates'): + if not hasattr(self, "zDeviceTemplates"): return ManagedEntity.getRRDTemplates(self) - result = [] - for name in self.zDeviceTemplates: - template = self.getRRDTemplateByName(name) - if template: - result.append(template) - return result + templates = [] + for templateName in self.zDeviceTemplates: + if templateName.endswith("-replacement") or templateName.endswith( + "-addition" + ): + continue + + template = self.getRRDTemplateByName(templateName) + if not template: + continue + replacement = self.getRRDTemplateByName( + "{}-replacement".format(templateName) + ) + + if replacement and replacement not in templates: + templates.append(replacement) + else: + templates.append(template) + + addition = self.getRRDTemplateByName( + "{}-addition".format(templateName) + ) + + if addition and addition not in templates: + templates.append(addition) + + return templates def getDataSourceOptions(self): """ @@ -436,7 +523,7 @@ def sysUpTime(self): @rtype: int """ try: - return self.cacheRRDValue('sysUpTime', -1) + return self.cacheRRDValue("sysUpTime", -1) except Exception: log.exception("failed getting sysUpTime") return -1 @@ -449,6 +536,7 @@ def availability(self, *args, **kw): @todo: Performance enhancement: Should move import outside of method """ from Products.ZenEvents import Availability + results = Availability.query(self.dmd, device=self.id, *args, **kw) if results: return results[0] @@ -464,12 +552,12 @@ def __getattr__(self, name): @todo: Not sure this is needed, see getLastPollSnmpUpTime and getSnmpLastCollection """ - if name == 'lastPollSnmpUpTime': + if name == "lastPollSnmpUpTime": return self._lastPollSnmpUpTime.getStatus() - elif name == 'snmpLastCollection': + elif name == "snmpLastCollection": return DateTime(self._snmpLastCollection) else: - raise AttributeError( name ) + raise AttributeError(name) def _setPropValue(self, id, value): """ @@ -478,12 +566,13 @@ def _setPropValue(self, id, value): @todo: Not sure this is needed, see setSnmpLastCollection """ self._wrapperCheck(value) - if id == 'snmpLastCollection': + if id == "snmpLastCollection": self._snmpLastCollection = float(value) else: ManagedEntity._setPropValue(self, id, value) - security.declareProtected(ZEN_MANAGE_DEVICE, 'applyDataMap') + security.declareProtected(ZEN_MANAGE_DEVICE, "applyDataMap") + def applyDataMap( self, datamap, relname="", compname="", modname="", parentId="" ): @@ -508,13 
+597,16 @@ def path(self): a MultiPathIndex. """ orgs = ( - self.systems() + - self.groups() + - [self.location()] + - [self.deviceClass()] - ) - return [ aq_base(self).__of__(o.primaryAq()).getPhysicalPath() \ - for o in orgs if o is not None ] + self.systems() + + self.groups() + + [self.location()] + + [self.deviceClass()] + ) + return [ + aq_base(self).__of__(o.primaryAq()).getPhysicalPath() + for o in orgs + if o is not None + ] def traceRoute(self, target, ippath=None): """ @@ -528,10 +620,12 @@ def traceRoute(self, target, ippath=None): @return: IP Addresses @rtype: list """ - if ippath is None: ippath=[] + if ippath is None: + ippath = [] if isinstance(target, basestring): target = self.findDevice(target) - if not target: raise ValueError("Target %s not found in DMD" % target) + if not target: + raise ValueError("Target %s not found in DMD" % target) return self.os.traceRoute(target, ippath) def getMonitoredComponents(self, collector=None, type=None): @@ -539,11 +633,16 @@ def getMonitoredComponents(self, collector=None, type=None): Return list of monitored DeviceComponents on this device. Wrapper method for getDeviceComponents """ - components = self.getDeviceComponents(monitored=True, - collector=collector, type=type) - return filter(lambda x: x.getProductionState() >= x.zProdStateThreshold, components) + components = self.getDeviceComponents( + monitored=True, collector=collector, type=type + ) + return filter( + lambda x: x.getProductionState() >= x.zProdStateThreshold, + components, + ) + + security.declareProtected(ZEN_VIEW, "getReportableComponents") - security.declareProtected(ZEN_VIEW, 'getReportableComponents') def getReportableComponents(self, collector=None, type=None): """ Return a list of DeviceComponents on this device that should be @@ -554,10 +653,10 @@ def getReportableComponents(self, collector=None, type=None): @permission: ZEN_VIEW @rtype: list """ - return self.getMonitoredComponents(collector=collector, type=type); + return self.getMonitoredComponents(collector=collector, type=type) def _createComponentSearchPathIndex(self): - indexName = 'getAllPaths' + indexName = "getAllPaths" if indexName not in self.componentSearch.indexes(): zcat = self._getOb("componentSearch") cat = zcat._catalog @@ -567,33 +666,38 @@ def _createComponentSearchPathIndex(self): def _create_componentSearch(self): from Products.ZCatalog.ZCatalog import manage_addZCatalog + manage_addZCatalog(self, "componentSearch", "componentSearch") zcat = self._getOb("componentSearch") cat = zcat._catalog - cat.addIndex('meta_type', makeCaseInsensitiveFieldIndex('meta_type')) - cat.addIndex('getCollectors', - makeCaseInsensitiveKeywordIndex('getCollectors')) - cat.addIndex('id', makeCaseInsensitiveFieldIndex('id')) - cat.addIndex('titleOrId', makeCaseInsensitiveFieldIndex('titleOrId')) - - zcat.addIndex('monitored', FieldIndex('monitored')) - zcat.addColumn('meta_type') - zcat.addColumn('getUUID') - zcat.addColumn('id') - zcat.addColumn('titleOrId') - zcat.addColumn('description') + cat.addIndex("meta_type", makeCaseInsensitiveFieldIndex("meta_type")) + cat.addIndex( + "getCollectors", makeCaseInsensitiveKeywordIndex("getCollectors") + ) + cat.addIndex("id", makeCaseInsensitiveFieldIndex("id")) + cat.addIndex("titleOrId", makeCaseInsensitiveFieldIndex("titleOrId")) + + zcat.addIndex("monitored", FieldIndex("monitored")) + zcat.addColumn("meta_type") + zcat.addColumn("getUUID") + zcat.addColumn("id") + zcat.addColumn("titleOrId") + zcat.addColumn("description") for c in 
self.getDeviceComponentsNoIndexGen(): c.index_object() - # see ZEN-4087 double index the first component when creating this catalog - # otherwise it will not appear in the list of components. + # See ZEN-4087 double index the first component when creating this + # catalog, otherwise it will not appear in the list of components. if len(self.componentSearch): self.componentSearch()[0].getObject().index_object() - def getDeviceComponents_from_model_catalog(self, monitored=None, collector=None, type=None): + def getDeviceComponents_from_model_catalog( + self, monitored=None, collector=None, type=None + ): """ - Return list of all DeviceComponents on this device extracted from model catalog. not used for now + Return list of all DeviceComponents on this device extracted from + model catalog. not used for now. @type monitored: boolean @type collector: string @@ -601,22 +705,27 @@ def getDeviceComponents_from_model_catalog(self, monitored=None, collector=None, @permission: ZEN_VIEW @rtype: list """ - query = {"objectImplements": "Products.ZenModel.DeviceComponent.DeviceComponent"} + query = { + "objectImplements": ( + "Products.ZenModel.DeviceComponent.DeviceComponent" + ) + } if collector is not None: - query['collectors'] = collector + query["collectors"] = collector if monitored is not None: - query['monitored'] = monitored + query["monitored"] = monitored if type is not None: - query['meta_type'] = type + query["meta_type"] = type cat = IModelCatalogTool(self) search_results = cat.search(query=query) results = [] if search_results.total > 0: - results = [ brain.getObject() for brain in search_results.results ] + results = [brain.getObject() for brain in search_results.results] return results - security.declareProtected(ZEN_VIEW, 'getDeviceComponents') + security.declareProtected(ZEN_VIEW, "getDeviceComponents") + def getDeviceComponents(self, monitored=None, collector=None, type=None): """ Return list of all DeviceComponents on this device. @@ -629,16 +738,16 @@ def getDeviceComponents(self, monitored=None, collector=None, type=None): """ # Auto-migrate component catalog for this device # See ZEN-2537 for reason for this change - if getattr(aq_base(self), 'componentSearch', None) is None: + if getattr(aq_base(self), "componentSearch", None) is None: self._create_componentSearch() query = {} if collector is not None: - query['getCollectors'] = collector + query["getCollectors"] = collector if monitored is not None: - query['monitored'] = monitored + query["monitored"] = monitored if type is not None: - query['meta_type'] = type + query["meta_type"] = type return list(getObjectsFromCatalog(self.componentSearch, query, log)) @@ -649,11 +758,14 @@ def getDeviceComponentsNoIndexGen(self): component index. It is used when rebuilding the device indexes. """ from DeviceComponent import DeviceComponent + for baseObject in (self, self.os, self.hw): for rel in baseObject.getRelationships(): - if rel.meta_type != "ToManyContRelationship": continue + if rel.meta_type != "ToManyContRelationship": + continue for obj in rel(): - if not isinstance(obj, DeviceComponent): break + if not isinstance(obj, DeviceComponent): + break for subComp in obj.getSubComponentsNoIndexGen(): yield subComp yield obj @@ -665,12 +777,11 @@ def getSnmpConnInfo(self): @rtype: SnmpConnInfo object """ from Products.ZenHub.services.PerformanceConfig import SnmpConnInfo + return SnmpConnInfo(self) def getHWManufacturerName(self): """ - DEPRECATED - Return the hardware manufacturer name of this device. 
- @rtype: string @todo: Remove this method and remove the call from testDevice.py """ @@ -696,8 +807,6 @@ def getHWProductClass(self): def getHWProductKey(self): """ - DEPRECATED - Return the productKey of the device hardware. - @rtype: string @todo: Remove this method and remove the call from testDevice.py """ @@ -705,8 +814,6 @@ def getHWProductKey(self): def getOSManufacturerName(self): """ - DEPRECATED - Return the OS manufacturer name of this device. - @rtype: string @todo: Remove this method and remove the call from testDevice.py """ @@ -714,8 +821,6 @@ def getOSManufacturerName(self): def getOSProductName(self): """ - DEPRECATED - Return the OS product name of this device. - @rtype: string @todo: Remove this method and remove the call from testDevice.py """ @@ -723,14 +828,13 @@ def getOSProductName(self): def getOSProductKey(self): """ - DEPRECATED - Return the productKey of the device OS. - @rtype: string @todo: Remove this method and remove the call from testDevice.py """ return self.os.getProductKey() - security.declareProtected(ZEN_CHANGE_DEVICE, 'setOSProductKey') + security.declareProtected(ZEN_CHANGE_DEVICE, "setOSProductKey") + def setOSProductKey(self, prodKey, manufacturer=None): """ Set the productKey of the device OS. @@ -739,28 +843,29 @@ def setOSProductKey(self, prodKey, manufacturer=None): def getHWTag(self): """ - DEPRECATED - Return the tag of the device HW. - @rtype: string @todo: remove this method and remove the call from testDevice.py """ return self.hw.tag - security.declareProtected(ZEN_CHANGE_DEVICE, 'setHWTag') + security.declareProtected(ZEN_CHANGE_DEVICE, "setHWTag") + def setHWTag(self, assettag): """ Set the asset tag of the device hardware. """ self.hw.tag = assettag - security.declareProtected(ZEN_CHANGE_DEVICE, 'setHWProductKey') + security.declareProtected(ZEN_CHANGE_DEVICE, "setHWProductKey") + def setHWProductKey(self, prodKey, manufacturer=None): """ Set the productKey of the device hardware. """ self.hw.setProductKey(prodKey, manufacturer) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setHWSerialNumber') + security.declareProtected(ZEN_CHANGE_DEVICE, "setHWSerialNumber") + def setHWSerialNumber(self, number): """ Set the hardware serial number. @@ -769,8 +874,6 @@ def setHWSerialNumber(self, number): def getHWSerialNumber(self): """ - DEPRECATED - Return the hardware serial number. - @rtype: string @todo: Remove this method and remove the call from testDevice.py """ @@ -786,11 +889,13 @@ def followNextHopIps(self): ips = [] for r in self.os.routes(): ipobj = r.nexthop() - #if ipobj and not ipobj.device(): - if ipobj: ips.append(ipobj.id) + # if ipobj and not ipobj.device(): + if ipobj: + ips.append(ipobj.id) return ips - security.declareProtected(ZEN_VIEW, 'getLocationName') + security.declareProtected(ZEN_VIEW, "getLocationName") + def getLocationName(self): """ Return the location name. i.e. "Rack" from /Locations/Loc/SubLoc/Rack @@ -799,10 +904,12 @@ def getLocationName(self): @permission: ZEN_VIEW """ loc = self.location() - if loc: return loc.getOrganizerName() + if loc: + return loc.getOrganizerName() return "" - security.declareProtected(ZEN_VIEW, 'getLocationLink') + security.declareProtected(ZEN_VIEW, "getLocationLink") + def getLocationLink(self): """ Return a link to the device's location. 
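
The getDeviceComponents() and getDeviceComponents_from_model_catalog() hunks above share one idea: optional keyword filters are folded into a catalog query dict, so only the filters a caller actually passed constrain the search. A minimal stand-alone sketch of that filter-folding step, kept outside any catalog (the function name is illustrative and not part of Device.py; the `type` parameter mirrors the original signature):

    def build_component_query(monitored=None, collector=None, type=None):
        """Fold the optional filters into a componentSearch-style query."""
        # Note: `type` shadows the builtin, matching the existing signature.
        query = {}
        if collector is not None:
            query["getCollectors"] = collector
        if monitored is not None:
            query["monitored"] = monitored
        if type is not None:
            query["meta_type"] = type
        return query

    # Example: build_component_query(monitored=True, collector="localhost")
    # returns {"getCollectors": "localhost", "monitored": True}
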
@@ -813,13 +920,16 @@ def getLocationLink(self): loc = self.location() if loc: if self.checkRemotePerm(ZEN_VIEW, loc): - return "%s" % (loc.getPrimaryUrlPath(), - loc.getOrganizerName()) + return "%s" % ( + loc.getPrimaryUrlPath(), + loc.getOrganizerName(), + ) else: return loc.getOrganizerName() return "None" - security.declareProtected(ZEN_VIEW, 'getSystemNames') + security.declareProtected(ZEN_VIEW, "getSystemNames") + def getSystemNames(self): """ Return the system names for this device @@ -829,8 +939,9 @@ def getSystemNames(self): """ return map(lambda x: x.getOrganizerName(), self.systems()) - security.declareProtected(ZEN_VIEW, 'getSystemNamesString') - def getSystemNamesString(self, sep=', '): + security.declareProtected(ZEN_VIEW, "getSystemNamesString") + + def getSystemNamesString(self, sep=", "): """ Return the system names for this device as a string @@ -839,7 +950,8 @@ def getSystemNamesString(self, sep=', '): """ return sep.join(self.getSystemNames()) - security.declareProtected(ZEN_VIEW, 'getDeviceGroupNames') + security.declareProtected(ZEN_VIEW, "getDeviceGroupNames") + def getDeviceGroupNames(self): """ Return the device group names for this device @@ -849,7 +961,8 @@ def getDeviceGroupNames(self): """ return map(lambda x: x.getOrganizerName(), self.groups()) - security.declareProtected(ZEN_VIEW, 'getPerformanceServer') + security.declareProtected(ZEN_VIEW, "getPerformanceServer") + def getPerformanceServer(self): """ Return the device performance server @@ -859,7 +972,8 @@ def getPerformanceServer(self): """ return self.perfServer() - security.declareProtected(ZEN_VIEW, 'getPerformanceServerName') + security.declareProtected(ZEN_VIEW, "getPerformanceServerName") + def getPerformanceServerName(self): """ Return the device performance server name @@ -868,15 +982,16 @@ def getPerformanceServerName(self): @permission: ZEN_VIEW """ cr = self.perfServer() - if cr: return cr.getId() - return '' + if cr: + return cr.getId() + return "" def getNetworkRoot(self, version=None): - """Return the network root object - """ - return self.getDmdRoot('Networks').getNetworkRoot(version) + """Return the network root object""" + return self.getDmdRoot("Networks").getNetworkRoot(version) + + security.declareProtected(ZEN_VIEW, "getLastChange") - security.declareProtected(ZEN_VIEW, 'getLastChange') def getLastChange(self): """ Return DateTime of last change detected on this device. @@ -886,7 +1001,8 @@ def getLastChange(self): """ return DateTime(float(self._lastChange)) - security.declareProtected(ZEN_VIEW, 'getLastChangeString') + security.declareProtected(ZEN_VIEW, "getLastChangeString") + def getLastChangeString(self): """ Return date string of last change detected on this device. @@ -896,7 +1012,8 @@ def getLastChangeString(self): """ return Time.LocalDateTimeSecsResolution(float(self._lastChange)) - security.declareProtected(ZEN_VIEW, 'getSnmpLastCollection') + security.declareProtected(ZEN_VIEW, "getSnmpLastCollection") + def getSnmpLastCollection(self): """ Return DateTime of last SNMP collection on this device. @@ -906,7 +1023,8 @@ def getSnmpLastCollection(self): """ return DateTime(float(self._snmpLastCollection)) - security.declareProtected(ZEN_VIEW, 'getSnmpLastCollectionString') + security.declareProtected(ZEN_VIEW, "getSnmpLastCollectionString") + def getSnmpLastCollectionString(self): """ Return date string of last SNMP collection on this device. 
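
Most of the Device.py methods reformatted above are guarded by the same AccessControl idiom: a security.declareProtected(...) call in the class body sits just before the def it protects, and this changeset merely separates the two with a blank line. The declaration is recorded by a ClassSecurityInfo instance at class-definition time and applied later by Zope's class initialization (not shown here). A reduced sketch of the idiom, assuming the AccessControl package is importable; the class, method body, and permission string below are placeholders rather than Zenoss code:

    from AccessControl import ClassSecurityInfo

    VIEW_PERMISSION = "View"  # stand-in for the ZEN_VIEW constant used in Device.py

    class ExampleEntity(object):
        security = ClassSecurityInfo()

        security.declareProtected(VIEW_PERMISSION, "getSystemNames")

        def getSystemNames(self):
            # Placeholder body; the real method maps organizers to names.
            return []
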
@@ -915,15 +1033,17 @@ def getSnmpLastCollectionString(self): @permission: ZEN_VIEW """ if self._snmpLastCollection: - return Time.LocalDateTimeSecsResolution(float(self._snmpLastCollection)) + return Time.LocalDateTimeSecsResolution( + float(self._snmpLastCollection) + ) return "Not Modeled" def _sanitizeIPaddress(self, ip): try: if not ip: - pass # Forcing a reset with a blank IP + pass # Forcing a reset with a blank IP elif ip.find("/") > -1: - ipWithoutNetmask, netmask = ip.split("/",1) + ipWithoutNetmask, netmask = ip.split("/", 1) checkip(ipWithoutNetmask) # Also check for valid netmask if they give us one if maskToBits(netmask) is None: @@ -932,15 +1052,15 @@ def _sanitizeIPaddress(self, ip): checkip(ip) if ip: # Strip out subnet mask before checking if it's a good IP - netmask = '' - if '/' in ip: - netmask = ip.split('/')[1] - ip = str(IPAddress(ipunwrap(ip.split('/')[0]))) + netmask = "" + if "/" in ip: + netmask = ip.split("/")[1] + ip = str(IPAddress(ipunwrap(ip.split("/")[0]))) if netmask: - ip = '/'.join([ip, netmask]) + ip = "/".join([ip, netmask]) except (IpAddressError, ValueError, NoNetMask): log.warn("%s is an invalid IP address", ip) - ip = '' + ip = "" return ip def _isDuplicateIp(self, ip): @@ -951,7 +1071,8 @@ def _isDuplicateIp(self, ip): return True return False - security.declareProtected(ZEN_ADMIN_DEVICE, 'setManageIp') + security.declareProtected(ZEN_ADMIN_DEVICE, "setManageIp") + def setManageIp(self, ip="", REQUEST=None): """ Set the manage IP, if IP is not passed perform DNS lookup. @@ -961,33 +1082,36 @@ def setManageIp(self, ip="", REQUEST=None): @rtype: string @permission: ZEN_ADMIN_DEVICE """ - message = '' - ip = ip.replace(' ', '') + message = "" + ip = ip.replace(" ", "") origip = ip ip = self._sanitizeIPaddress(ip) - if not ip: # What if they put in a DNS name? + if not ip: # What if they put in a DNS name? 
try: ip = getHostByName(origip) - if ip == '0.0.0.0': + if ip == "0.0.0.0": # Host resolution failed - ip = '' + ip = "" except socket.error: - ip = '' + ip = "" if not ip: try: ip = getHostByName(ipunwrap(self.id)) except socket.error: - ip = '' + ip = "" if origip: - message = ("%s is an invalid IP address, " - "and no appropriate IP could" - " be found via DNS for %s") % (origip, self.id) + message = ( + "%s is an invalid IP address, " + "and no appropriate IP could" + " be found via DNS for %s" + ) % (origip, self.id) log.warn(message) else: - message = "DNS lookup of '%s' failed to return an IP" % \ - self.id + message = ( + "DNS lookup of '%s' failed to return an IP" % self.id + ) if ip: if self._isDuplicateIp(ip): @@ -996,20 +1120,26 @@ def setManageIp(self, ip="", REQUEST=None): else: self.manageIp = ip - notify(IndexingEvent(self, ('decimal_ipAddress', 'text_ipAddress'), True)) - log.info("%s's IP address has been set to %s.", - self.id, ip) - #Create a new IpAddress object from manageIp under the Network + notify( + IndexingEvent( + self, ("decimal_ipAddress", "text_ipAddress"), True + ) + ) + log.info("%s's IP address has been set to %s.", self.id, ip) + # Create a new IpAddress object from manageIp under the Network ipWithoutNetmask, netmask = ipAndMaskFromIpMask(ip) - ipobj = self.getNetworkRoot().createIp(ipWithoutNetmask, netmask) + ipobj = self.getNetworkRoot().createIp( + ipWithoutNetmask, netmask + ) self.ipaddress.addRelation(ipobj) notify(IndexingEvent(ipobj)) if REQUEST: - audit('UI.Device.ResetIP', self, ip=ip) + audit("UI.Device.ResetIP", self, ip=ip) return message - security.declareProtected(ZEN_VIEW, 'getManageIp') + security.declareProtected(ZEN_VIEW, "getManageIp") + def getManageIp(self): """ Return the management ip for this device. @@ -1019,6 +1149,7 @@ def getManageIp(self): """ return self.manageIp + @deprecated def getManageIpObj(self): """ DEPRECATED - Return the management ipobject for this device. @@ -1029,7 +1160,8 @@ def getManageIpObj(self): if self.manageIp: return self.Networks.findIp(self.manageIp) - security.declareProtected(ZEN_VIEW, 'getManageInterface') + security.declareProtected(ZEN_VIEW, "getManageInterface") + def getManageInterface(self): """ Return the management interface of a device based on its manageIp. @@ -1038,9 +1170,11 @@ def getManageInterface(self): @permission: ZEN_VIEW """ ipobj = self.Networks.findIp(self.manageIp) - if ipobj: return ipobj.interface() + if ipobj: + return ipobj.interface() + + security.declareProtected(ZEN_VIEW, "uptimeStr") - security.declareProtected(ZEN_VIEW, 'uptimeStr') def uptimeStr(self): """ Return the SNMP uptime @@ -1054,13 +1188,12 @@ def uptimeStr(self): return "Unknown" elif ut == 0: return "0d:0h:0m:0s" - ut = float(ut)/100. 
- days = int(ut/86400) - hour = int((ut%86400)/3600) - mins = int((ut%3600)/60) - secs = int(ut%60) - return "%02dd:%02dh:%02dm:%02ds" % ( - days, hour, mins, secs) + ut = float(ut) / 100.0 + days = int(ut / 86400) + hour = int((ut % 86400) / 3600) + mins = int((ut % 3600) / 60) + secs = int(ut % 60) + return "%02dd:%02dh:%02dm:%02ds" % (days, hour, mins, secs) def getPeerDeviceClassNames(self): """ @@ -1075,7 +1208,8 @@ def getPeerDeviceClassNames(self): # Edit functions used to manage device relations and other attributes #################################################################### - security.declareProtected(ZEN_CHANGE_DEVICE, 'manage_snmpCommunity') + security.declareProtected(ZEN_CHANGE_DEVICE, "manage_snmpCommunity") + def manage_snmpCommunity(self): """ Reset the snmp community using the zSnmpCommunities variable. @@ -1083,9 +1217,13 @@ def manage_snmpCommunity(self): @permission: ZEN_CHANGE_DEVICE """ try: - zSnmpCommunity, zSnmpPort, zSnmpVer, snmpname = \ - findCommunity(self, self.manageIp, self.getDeviceClassPath(), - port=self.zSnmpPort, version=self.zSnmpVer) + zSnmpCommunity, zSnmpPort, zSnmpVer, snmpname = findCommunity( + self, + self.manageIp, + self.getDeviceClassPath(), + port=self.zSnmpPort, + version=self.zSnmpVer, + ) except NoSnmp: pass else: @@ -1096,13 +1234,25 @@ def manage_snmpCommunity(self): if self.zSnmpVer != zSnmpVer: self.setZenProperty("zSnmpVer", zSnmpVer) - def setProductInfo(self, hwManufacturer="", hwProductName="", - osManufacturer="", osProductName=""): + def setProductInfo( + self, + hwManufacturer="", + hwProductName="", + osManufacturer="", + osProductName="", + ): if hwManufacturer and hwProductName: # updateDevice uses the sentinel value "_no_change" to indicate # that we really don't want change this value - if hwManufacturer != "_no_change" and hwProductName != "_no_change": - log.info("setting hardware manufacturer to %r productName to %r", hwManufacturer, hwProductName) + if ( + hwManufacturer != "_no_change" + and hwProductName != "_no_change" + ): + log.info( + "setting hardware manufacturer to %r productName to %r", + hwManufacturer, + hwProductName, + ) self.hw.setProduct(hwProductName, hwManufacturer) else: self.hw.removeProductClass() @@ -1110,14 +1260,22 @@ def setProductInfo(self, hwManufacturer="", hwProductName="", if osManufacturer and osProductName: # updateDevice uses the sentinel value "_no_change" to indicate # that we really don't want change this value - if osManufacturer != "_no_change" and osProductName != "_no_change": - log.info("setting os manufacturer to %r productName to %r", osManufacturer, osProductName) + if ( + osManufacturer != "_no_change" + and osProductName != "_no_change" + ): + log.info( + "setting os manufacturer to %r productName to %r", + osManufacturer, + osProductName, + ) self.os.setProduct(osProductName, osManufacturer, isOS=True) else: self.os.removeProductClass() - security.declareProtected(ZEN_CHANGE_DEVICE, 'updateDevice') - def updateDevice(self,**kwargs): + security.declareProtected(ZEN_CHANGE_DEVICE, "updateDevice") + + def updateDevice(self, **kwargs): """ Update the device relation and attributes, if passed. 
If any parameter is not passed it will not be updated; the value of any unpassed device @@ -1129,9 +1287,12 @@ def updateDevice(self,**kwargs): tag -- tag number [string] serialNumber -- serial number [string] zProperties -- dict of zProperties [dict] - zSnmpCommunity -- snmp community (overrides corresponding value is zProperties) [string] - zSnmpPort -- snmp port (overrides corresponding value in zProperties) [string] - zSnmpVer -- snmp version (overrides corresponding value in zProperties) [string] + zSnmpCommunity -- snmp community (overrides corresponding + value is zProperties) [string] + zSnmpPort -- snmp port (overrides corresponding value + in zProperties) [string] + zSnmpVer -- snmp version (overrides corresponding value + in zProperties) [string] rackSlot -- rack slot number [integer] productionState -- production state of device [integer] priority -- device priority [integer] @@ -1146,28 +1307,38 @@ def updateDevice(self,**kwargs): performanceMonitor -- collector name [string] """ - if 'title' in kwargs and kwargs['title'] is not None: - newTitle = str(kwargs['title']).strip() + if "title" in kwargs and kwargs["title"] is not None: + newTitle = str(kwargs["title"]).strip() if newTitle and newTitle != self.title: log.info("setting title to %r", newTitle) self.title = newTitle - if 'tag' in kwargs and kwargs['tag'] is not None and kwargs['tag'] != self.hw.tag: - log.info("setting tag to %r", kwargs['tag']) - self.hw.tag = kwargs['tag'] - if 'serialNumber' in kwargs and kwargs['serialNumber'] is not None and \ - kwargs['serialNumber'] != self.hw.serialNumber: - log.info("setting serialNumber to %r", kwargs['serialNumber']) - self.hw.serialNumber = kwargs['serialNumber'] + if ( + "tag" in kwargs + and kwargs["tag"] is not None + and kwargs["tag"] != self.hw.tag + ): + log.info("setting tag to %r", kwargs["tag"]) + self.hw.tag = kwargs["tag"] + if ( + "serialNumber" in kwargs + and kwargs["serialNumber"] is not None + and kwargs["serialNumber"] != self.hw.serialNumber + ): + log.info("setting serialNumber to %r", kwargs["serialNumber"]) + self.hw.serialNumber = kwargs["serialNumber"] # Set zProperties passed in intelligently - if 'zProperties' in kwargs and kwargs['zProperties'] is not None: - zProperties = kwargs['zProperties'] + if "zProperties" in kwargs and kwargs["zProperties"] is not None: + zProperties = kwargs["zProperties"] else: zProperties = {} # override any snmp properties that may be in zProperties - zpropUpdate = dict((name, kwargs[name]) for name in ('zSnmpCommunity', 'zSnmpPort', 'zSnmpVer') - if name in kwargs) + zpropUpdate = dict( + (name, kwargs[name]) + for name in ("zSnmpCommunity", "zSnmpPort", "zSnmpVer") + if name in kwargs + ) zProperties.update(zpropUpdate) # apply any zProperties to self @@ -1177,68 +1348,93 @@ def updateDevice(self,**kwargs): # need to check here self.setZenProperty(prop, value) - if 'rackSlot' in kwargs and kwargs['rackSlot'] != self.rackSlot: + if "rackSlot" in kwargs and kwargs["rackSlot"] != self.rackSlot: # rackSlot may be a string or integer log.info("setting rackSlot to %r", kwargs["rackSlot"]) self.rackSlot = kwargs["rackSlot"] - if 'productionState' in kwargs: - # Always set production state, but don't log it if it didn't change. - if kwargs['productionState'] != self.getProductionState(): - prodStateName = self.dmd.convertProdState(int(kwargs['productionState'])) + if "productionState" in kwargs: + # Always set production state, + # but don't log it if it didn't change. 
+ if kwargs["productionState"] != self.getProductionState(): + prodStateName = self.dmd.convertProdState( + int(kwargs["productionState"]) + ) log.info("setting productionState to %s", prodStateName) self.setProdState(kwargs["productionState"]) - if 'priority' in kwargs and int(kwargs['priority']) != self.priority: - priorityName = self.dmd.convertPriority(kwargs['priority']) + if "priority" in kwargs and int(kwargs["priority"]) != self.priority: + priorityName = self.dmd.convertPriority(kwargs["priority"]) log.info("setting priority to %s", priorityName) self.setPriority(kwargs["priority"]) - if 'comments' in kwargs and kwargs['comments'] != self.comments: + if "comments" in kwargs and kwargs["comments"] != self.comments: log.info("setting comments to %r", kwargs["comments"]) self.comments = kwargs["comments"] - self.setProductInfo(hwManufacturer=kwargs.get("hwManufacturer","_no_change"), - hwProductName=kwargs.get("hwProductName","_no_change"), - osManufacturer=kwargs.get("osManufacturer","_no_change"), - osProductName=kwargs.get("osProductName","_no_change")) + self.setProductInfo( + hwManufacturer=kwargs.get("hwManufacturer", "_no_change"), + hwProductName=kwargs.get("hwProductName", "_no_change"), + osManufacturer=kwargs.get("osManufacturer", "_no_change"), + osProductName=kwargs.get("osProductName", "_no_change"), + ) if kwargs.get("locationPath", False): log.info("setting location to %r", kwargs["locationPath"]) self.setLocation(kwargs["locationPath"]) - if kwargs.get("groupPaths",False): + if kwargs.get("groupPaths", False): log.info("setting group %r", kwargs["groupPaths"]) self.setGroups(kwargs["groupPaths"]) - if kwargs.get("systemPaths",False): + if kwargs.get("systemPaths", False): log.info("setting system %r", kwargs["systemPaths"]) self.setSystems(kwargs["systemPaths"]) - if 'performanceMonitor' in kwargs and \ - kwargs["performanceMonitor"] != self.getPerformanceServerName(): - log.info("setting performance monitor to %r", kwargs["performanceMonitor"]) + if ( + "performanceMonitor" in kwargs + and kwargs["performanceMonitor"] != self.getPerformanceServerName() + ): + log.info( + "setting performance monitor to %r", + kwargs["performanceMonitor"], + ) self.setPerformanceMonitor(kwargs["performanceMonitor"]) self.setLastChange() notify(IndexingEvent(self)) - security.declareProtected(ZEN_CHANGE_DEVICE, 'manage_editDevice') - def manage_editDevice(self, - tag="", serialNumber="", - zSnmpCommunity=None, zSnmpPort=161, zSnmpVer=None, - rackSlot="", productionState=DEFAULT_PRODSTATE, comments="", - hwManufacturer="", hwProductName="", - osManufacturer="", osProductName="", - locationPath="", groupPaths=[], systemPaths=[], - performanceMonitor="localhost", priority=3, - zProperties=None, title=None, REQUEST=None): - """ - Edit the device relation and attributes. This method will update device - properties because of the default values that are passed. Calling this - method using a **kwargs dict will result in default values being set for - many device properties. To update only a subset of these properties use - updateDevice(**kwargs). 
+ security.declareProtected(ZEN_CHANGE_DEVICE, "manage_editDevice") + + def manage_editDevice( + self, + tag="", + serialNumber="", + zSnmpCommunity=None, + zSnmpPort=161, + zSnmpVer=None, + rackSlot="", + productionState=DEFAULT_PRODSTATE, + comments="", + hwManufacturer="", + hwProductName="", + osManufacturer="", + osProductName="", + locationPath="", + groupPaths=[], + systemPaths=[], + performanceMonitor="localhost", + priority=3, + zProperties=None, + title=None, + REQUEST=None, + ): + """ + Edit the device relation and attributes. + This method will update device properties because of the default + values that are passed. Calling this method using a **kwargs dict will + result in default values being set for many device properties. To + update only a subset of these properties use updateDevice(**kwargs). @param locationPath: path to a Location @type locationPath: string @@ -1251,30 +1447,45 @@ def manage_editDevice(self, @permission: ZEN_CHANGE_DEVICE """ self.updateDevice( - tag=tag, serialNumber=serialNumber, - zSnmpCommunity=zSnmpCommunity, zSnmpPort=zSnmpPort, zSnmpVer=zSnmpVer, - rackSlot=rackSlot, productionState=productionState, comments=comments, - hwManufacturer=hwManufacturer, hwProductName=hwProductName, - osManufacturer=osManufacturer, osProductName=osProductName, - locationPath=locationPath, groupPaths=groupPaths, systemPaths=systemPaths, - performanceMonitor=performanceMonitor, priority=priority, - zProperties=zProperties, title=title, REQUEST=REQUEST) + tag=tag, + serialNumber=serialNumber, + zSnmpCommunity=zSnmpCommunity, + zSnmpPort=zSnmpPort, + zSnmpVer=zSnmpVer, + rackSlot=rackSlot, + productionState=productionState, + comments=comments, + hwManufacturer=hwManufacturer, + hwProductName=hwProductName, + osManufacturer=osManufacturer, + osProductName=osProductName, + locationPath=locationPath, + groupPaths=groupPaths, + systemPaths=systemPaths, + performanceMonitor=performanceMonitor, + priority=priority, + zProperties=zProperties, + title=title, + REQUEST=REQUEST, + ) if REQUEST: from Products.ZenUtils.Time import SaveMessage + IMessageSender(self).sendToBrowser("Saved", SaveMessage()) # TODO: Audit all of the changed values. # How is this method called to test the output? # Will the [zProperties] field show password values? 
- audit('UI.Device.Edit', self) + audit("UI.Device.Edit", self) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setTitle') + security.declareProtected(ZEN_CHANGE_DEVICE, "setTitle") + def setTitle(self, newTitle): """ Changes the title to newTitle and reindexes the object """ super(Device, self).setTitle(newTitle) - notify(IndexingEvent(self, ('name',), True)) + notify(IndexingEvent(self, ("name",), True)) def monitorDevice(self): """ @@ -1282,8 +1493,10 @@ def monitorDevice(self): @rtype: boolean """ - return (self.getProductionState() >= self.zProdStateThreshold - and not self.renameInProgress) + return ( + self.getProductionState() >= self.zProdStateThreshold + and not self.renameInProgress + ) def snmpMonitorDevice(self): """ @@ -1291,9 +1504,11 @@ def snmpMonitorDevice(self): @rtype: boolean """ - return (self.monitorDevice() - and self.getManageIp() - and not self.zSnmpMonitorIgnore) + return ( + self.monitorDevice() + and self.getManageIp() + and not self.zSnmpMonitorIgnore + ) def getPriority(self): """ @@ -1333,7 +1548,8 @@ def getSnmpStatusString(self): return str(self.convertStatus(result)) return "Down" - security.declareProtected(ZEN_CHANGE_DEVICE_PRODSTATE, 'setProdState') + security.declareProtected(ZEN_CHANGE_DEVICE_PRODSTATE, "setProdState") + def setProdState(self, state, maintWindowChange=False, REQUEST=None): """ Set the device's production state. @@ -1345,14 +1561,21 @@ def setProdState(self, state, maintWindowChange=False, REQUEST=None): @permission: ZEN_CHANGE_DEVICE """ # Set production state on all components that inherit from this device - ret = super(Device, self).setProdState(state, maintWindowChange, REQUEST) + ret = super(Device, self).setProdState( + state, maintWindowChange, REQUEST + ) self._p_changed = True if REQUEST: - audit('UI.Device.Edit', self, productionState=state, - maintenanceWindowChange=maintWindowChange) + audit( + "UI.Device.Edit", + self, + productionState=state, + maintenanceWindowChange=maintWindowChange, + ) return ret - security.declareProtected(ZEN_CHANGE_DEVICE, 'setPriority') + security.declareProtected(ZEN_CHANGE_DEVICE, "setPriority") + def setPriority(self, priority, REQUEST=None): """ Set the device's priority @@ -1363,14 +1586,15 @@ def setPriority(self, priority, REQUEST=None): self.priority = int(priority) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Priority Updated', - "Device priority has been set to %s." % ( - self.getPriorityString()) + "Priority Updated", + "Device priority has been set to %s." + % (self.getPriorityString()), ) - audit('UI.Device.Edit', self, priority=priority) + audit("UI.Device.Edit", self, priority=priority) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setLastChange') + security.declareProtected(ZEN_CHANGE_DEVICE, "setLastChange") + def setLastChange(self, value=None): """ Set the changed datetime for this device. @@ -1383,7 +1607,8 @@ def setLastChange(self, value=None): value = time.time() self._lastChange = float(value) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setSnmpLastCollection') + security.declareProtected(ZEN_CHANGE_DEVICE, "setSnmpLastCollection") + def setSnmpLastCollection(self, value=None): """ Set the last time snmp collection occurred. 
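Reviewer note (not part of the patch): the updateDevice()/manage_editDevice() hunks above reformat the docstrings that describe two different update semantics, and a minimal usage sketch may help while reviewing. This is an illustration only; it assumes a zendmd session with dmd bound, and "example-host" is a hypothetical device name.

    # updateDevice() only touches the properties that are actually passed;
    # unspecified properties keep their current values.
    dev = dmd.Devices.findDevice("example-host")  # assumed to return a Device
    dev.updateDevice(priority=4, comments="rack moved")

    # manage_editDevice() forwards every parameter, so properties that are
    # not specified are pushed back to the defaults in its signature
    # (tag="", rackSlot="", performanceMonitor="localhost", ...) whenever
    # those defaults differ from the current values.
    dev.manage_editDevice(priority=4)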
@@ -1396,39 +1621,40 @@ def setSnmpLastCollection(self, value=None): value = time.time() self._snmpLastCollection = float(value) - security.declareProtected(ZEN_CHANGE_DEVICE, 'addManufacturer') - def addManufacturer(self, newHWManufacturerName=None, - newSWManufacturerName=None, REQUEST=None): - """ - DEPRECATED - - Add either a hardware or software manufacturer to the database. + security.declareProtected(ZEN_CHANGE_DEVICE, "addManufacturer") + def addManufacturer( + self, + newHWManufacturerName=None, + newSWManufacturerName=None, + REQUEST=None, + ): + """ @permission: ZEN_CHANGE_DEVICE @todo: Doesn't really do work on a device object. Already exists on ZDeviceLoader """ mname = newHWManufacturerName - field = 'hwManufacturer' + field = "hwManufacturer" if not mname: mname = newSWManufacturerName - field = 'osManufacturer' + field = "osManufacturer" self.getDmdRoot("Manufacturers").createManufacturer(mname) if REQUEST: REQUEST[field] = mname messaging.IMessageSender(self).sendToBrowser( - 'Manufacturer Added', - 'The %s manufacturer has been created.' % mname + "Manufacturer Added", + "The %s manufacturer has been created." % mname, ) - audit('UI.Device.AddManufacturer', self, manufacturer=mname) + audit("UI.Device.AddManufacturer", self, manufacturer=mname) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setHWProduct') - def setHWProduct(self, newHWProductName=None, hwManufacturer=None, - REQUEST=None): - """ - DEPRECATED - - Adds a new hardware product + security.declareProtected(ZEN_CHANGE_DEVICE, "setHWProduct") + def setHWProduct( + self, newHWProductName=None, hwManufacturer=None, REQUEST=None + ): + """ @permission: ZEN_CHANGE_DEVICE @todo: Doesn't really do work on a device object. Already exists on ZDeviceLoader @@ -1436,59 +1662,72 @@ def setHWProduct(self, newHWProductName=None, hwManufacturer=None, added = False if newHWProductName and hwManufacturer: self.getDmdRoot("Manufacturers").createHardwareProduct( - newHWProductName, hwManufacturer) + newHWProductName, hwManufacturer + ) added = True if REQUEST: if added: messaging.IMessageSender(self).sendToBrowser( - 'Product Set', - 'Hardware product has been set to %s.' % newHWProductName + "Product Set", + "Hardware product has been set to %s." % newHWProductName, + ) + REQUEST["hwProductName"] = newHWProductName + audit( + "UI.Device.SetHWProduct", + self, + manufacturer=hwManufacturer, + product=newHWProductName, ) - REQUEST['hwProductName'] = newHWProductName - audit('UI.Device.SetHWProduct', self, manufacturer=hwManufacturer, - product=newHWProductName) else: messaging.IMessageSender(self).sendToBrowser( - 'Set Product Failed', - 'Hardware product could not be set to %s.'%newHWProductName, - priority=messaging.WARNING + "Set Product Failed", + "Hardware product could not be set to %s." + % newHWProductName, + priority=messaging.WARNING, ) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setOSProduct') - def setOSProduct(self, newOSProductName=None, osManufacturer=None, REQUEST=None): - """ - DEPRECATED - Adds a new os product + security.declareProtected(ZEN_CHANGE_DEVICE, "setOSProduct") + def setOSProduct( + self, newOSProductName=None, osManufacturer=None, REQUEST=None + ): + """ @permission: ZEN_CHANGE_DEVICE @todo: Doesn't really do work on a device object. 
Already exists on ZDeviceLoader """ if newOSProductName: self.getDmdRoot("Manufacturers").createSoftwareProduct( - newOSProductName, osManufacturer, isOS=True) + newOSProductName, osManufacturer, isOS=True + ) if REQUEST: if newOSProductName: messaging.IMessageSender(self).sendToBrowser( - 'Product Set', - 'OS product has been set to %s.' % newOSProductName + "Product Set", + "OS product has been set to %s." % newOSProductName, + ) + REQUEST["osProductName"] = newOSProductName + audit( + "UI.Device.SetOSProduct", + self, + manufacturer=osManufacturer, + product=newOSProductName, ) - REQUEST['osProductName'] = newOSProductName - audit('UI.Device.SetOSProduct', self, manufacturer=osManufacturer, - product=newOSProductName) else: messaging.IMessageSender(self).sendToBrowser( - 'Set Product Failed', - 'OS product could not be set to %s.' % newOSProductName, - priority=messaging.WARNING + "Set Product Failed", + "OS product could not be set to %s." % newOSProductName, + priority=messaging.WARNING, ) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setLocation') + security.declareProtected(ZEN_CHANGE_DEVICE, "setLocation") + def setLocation(self, locationPath, REQUEST=None): """ - Set the location of a device. If the location is new it will be created. + Set the location of a device. + If the location is new it will be created. @permission: ZEN_CHANGE_DEVICE """ @@ -1498,33 +1737,33 @@ def setLocation(self, locationPath, REQUEST=None): locobj = self.getDmdRoot("Locations").createOrganizer(locationPath) self.addRelation("location", locobj) self.setAdminLocalRoles() - notify(IndexingEvent(self, 'path', False)) + notify(IndexingEvent(self, "path", False)) if REQUEST: - action = 'SetLocation' if locationPath else 'RemoveFromLocation' - audit(['UI.Device', action], self, location=locationPath) + action = "SetLocation" if locationPath else "RemoveFromLocation" + audit(["UI.Device", action], self, location=locationPath) + + security.declareProtected(ZEN_CHANGE_DEVICE, "addLocation") - security.declareProtected(ZEN_CHANGE_DEVICE, 'addLocation') def addLocation(self, newLocationPath, REQUEST=None): """ - DEPRECATED - Add a new location and relate it to this device - @todo: Doesn't really do work on a device object. Already exists on ZDeviceLoader """ self.getDmdRoot("Locations").createOrganizer(newLocationPath) if REQUEST: - REQUEST['locationPath'] = newLocationPath + REQUEST["locationPath"] = newLocationPath messaging.IMessageSender(self).sendToBrowser( - 'Location Added', - 'Location %s has been created.' % newLocationPath + "Location Added", + "Location %s has been created." % newLocationPath, ) - audit('UI.Device.SetLocation', self, location=newLocationPath) + audit("UI.Device.SetLocation", self, location=newLocationPath) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setPerformanceMonitor') - def setPerformanceMonitor(self, performanceMonitor, - newPerformanceMonitor=None, REQUEST=None): + security.declareProtected(ZEN_CHANGE_DEVICE, "setPerformanceMonitor") + + def setPerformanceMonitor( + self, performanceMonitor, newPerformanceMonitor=None, REQUEST=None + ): """ Set the performance monitor for this device. 
If newPerformanceMonitor is passed in create it @@ -1536,31 +1775,46 @@ def setPerformanceMonitor(self, performanceMonitor, if self.getPerformanceServer() is not None: oldPerformanceMonitor = self.getPerformanceServer().getId() - self.getDmdRoot("Monitors").setPreviousCollectorForDevice(self.getId(), oldPerformanceMonitor) + self.getDmdRoot("Monitors").setPreviousCollectorForDevice( + self.getId(), oldPerformanceMonitor + ) collectorNotFound = False warning = None obj = self.getDmdRoot("Monitors").getPerformanceMonitor( - performanceMonitor) + performanceMonitor + ) if obj.viewName() != performanceMonitor: collectorNotFound = True - warning = ('Collector {} is not found. Performance monitor has been set to {}.'.format( - performanceMonitor, obj.viewName())) + warning = ( + "Collector {} is not found. " + "Performance monitor has been set to {}.".format( + performanceMonitor, obj.viewName() + ) + ) log.warn(warning) self.addRelation("perfServer", obj) self.setLastChange() notify(IndexingEvent(self)) if REQUEST: - message = 'Performance monitor has been set to {}.'.format(performanceMonitor) + message = "Performance monitor has been set to {}.".format( + performanceMonitor + ) if collectorNotFound: message = warning - messaging.IMessageSender(self).sendToBrowser('Monitor Changed', message) - audit('UI.Device.SetPerformanceMonitor', self, - performancemonitor=performanceMonitor) + messaging.IMessageSender(self).sendToBrowser( + "Monitor Changed", message + ) + audit( + "UI.Device.SetPerformanceMonitor", + self, + performancemonitor=performanceMonitor, + ) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setGroups') + security.declareProtected(ZEN_CHANGE_DEVICE, "setGroups") + def setGroups(self, groupPaths): """ Set the list of groups for this device based on a list of paths @@ -1569,9 +1823,10 @@ def setGroups(self, groupPaths): """ objGetter = self.getDmdRoot("Groups").createOrganizer self._setRelations("groups", objGetter, groupPaths) - notify(IndexingEvent(self, 'path', False)) + notify(IndexingEvent(self, "path", False)) + + security.declareProtected(ZEN_CHANGE_DEVICE, "addDeviceGroup") - security.declareProtected(ZEN_CHANGE_DEVICE, 'addDeviceGroup') def addDeviceGroup(self, newDeviceGroupPath, REQUEST=None): """ DEPRECATED? @@ -1584,13 +1839,14 @@ def addDeviceGroup(self, newDeviceGroupPath, REQUEST=None): self.addRelation("groups", group) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Group Added', - 'Group %s has been created.' % newDeviceGroupPath + "Group Added", + "Group %s has been created." % newDeviceGroupPath, ) - audit('UI.Device.AddToGroup', self, group=newDeviceGroupPath) + audit("UI.Device.AddToGroup", self, group=newDeviceGroupPath) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setSystems') + security.declareProtected(ZEN_CHANGE_DEVICE, "setSystems") + def setSystems(self, systemPaths): """ Set a list of systems to this device using their system paths @@ -1599,9 +1855,10 @@ def setSystems(self, systemPaths): """ objGetter = self.getDmdRoot("Systems").createOrganizer self._setRelations("systems", objGetter, systemPaths) - notify(IndexingEvent(self, 'path', False)) + notify(IndexingEvent(self, "path", False)) + + security.declareProtected(ZEN_CHANGE_DEVICE, "addSystem") - security.declareProtected(ZEN_CHANGE_DEVICE, 'addSystem') def addSystem(self, newSystemPath, REQUEST=None): """ DEPRECATED? 
@@ -1614,13 +1871,13 @@ def addSystem(self, newSystemPath, REQUEST=None): self.addRelation("systems", sys) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'System Added', - 'System %s has been created.' % newSystemPath + "System Added", "System %s has been created." % newSystemPath ) - audit('UI.Device.AddToSystem', self, system=newSystemPath) + audit("UI.Device.AddToSystem", self, system=newSystemPath) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_CHANGE_DEVICE, 'setTerminalServer') + security.declareProtected(ZEN_CHANGE_DEVICE, "setTerminalServer") + def setTerminalServer(self, termservername): """ Set the terminal server of this device @@ -1630,7 +1887,7 @@ def setTerminalServer(self, termservername): """ termserver = self.findDevice(termservername) if termserver: - self.addRelation('termserver', termserver) + self.addRelation("termserver", termserver) def _setRelations(self, relName, objGetter, relPaths): """ @@ -1641,16 +1898,18 @@ def _setRelations(self, relName, objGetter, relPaths): @param relPaths: list of relationship paths """ if not isinstance(relPaths, (list, tuple)): - relPaths = [relPaths,] + relPaths = [ + relPaths, + ] relPaths = filter(lambda x: x.strip(), relPaths) rel = getattr(self, relName, None) if not rel: - raise AttributeError( "Relation %s not found" % relName) + raise AttributeError("Relation %s not found" % relName) curRelIds = {} for value in rel.objectValuesAll(): curRelIds[value.getOrganizerName()] = value for path in relPaths: - if not path in curRelIds: + if path not in curRelIds: robj = objGetter(path) self.addRelation(relName, robj) else: @@ -1665,10 +1924,10 @@ def _getOtherExpandedLinks(self): @return a list of the html links supplied by implementers of the IExpandedLinkProvider subscriber interface """ - providers = subscribers( [self], IExpandedLinkProvider ) + providers = subscribers([self], IExpandedLinkProvider) expandedLinkList = [] for provider in providers: - expandedLinkList.extend( provider.getExpandedLinks() ) + expandedLinkList.extend(provider.getExpandedLinks()) return expandedLinkList def getExpandedLinks(self): @@ -1678,21 +1937,24 @@ def getExpandedLinks(self): @rtype: HTML output """ from Products.ZenUtils.ZenTales import talesEval + try: - linksHtml = talesEval('string:' + self.zLinks, self) + linksHtml = talesEval("string:" + self.zLinks, self) otherLinks = self._getOtherExpandedLinks() if otherLinks: - linksHtml += '
'.join(otherLinks) + linksHtml += "
".join(otherLinks) return linksHtml except Exception as ex: import cgi + return "%s" % cgi.escape(str(ex)) #################################################################### # Private getter functions that implement DeviceResultInt #################################################################### - security.declareProtected(ZEN_VIEW, 'device') + security.declareProtected(ZEN_VIEW, "device") + def device(self): """ Support DeviceResultInt mixin class. Returns itself @@ -1719,8 +1981,10 @@ def pastSnmpMaxFailures(self): return False # FIXME: cleanup --force option #2660 - security.declareProtected(ZEN_MANAGE_DEVICE_STATUS, - 'getLastPollSnmpUpTime') + security.declareProtected( + ZEN_MANAGE_DEVICE_STATUS, "getLastPollSnmpUpTime" + ) + def getLastPollSnmpUpTime(self): """ Get the value of the snmpUpTime status object @@ -1730,8 +1994,10 @@ def getLastPollSnmpUpTime(self): return self._lastPollSnmpUpTime.getStatus() # FIXME: cleanup --force option #2660 - security.declareProtected(ZEN_MANAGE_DEVICE_STATUS, - 'setLastPollSnmpUpTime') + security.declareProtected( + ZEN_MANAGE_DEVICE_STATUS, "setLastPollSnmpUpTime" + ) + def setLastPollSnmpUpTime(self, value): """ Set the value of the snmpUpTime status object @@ -1745,10 +2011,12 @@ def snmpAgeCheck(self, hours): Returns True if SNMP data was collected more than 24 hours ago """ lastcoll = self.getSnmpLastCollection() - hours = hours/24.0 - if DateTime() > lastcoll + hours: return 1 + hours = hours / 24.0 + if DateTime() > lastcoll + hours: + return 1 + + security.declareProtected(ZEN_CHANGE_DEVICE, "applyProductContext") - security.declareProtected(ZEN_CHANGE_DEVICE, 'applyProductContext') def applyProductContext(self): """ Apply zProperties inherited from Product Contexts. @@ -1780,9 +2048,17 @@ def _applyProdContext(self, context): # Management Functions #################################################################### - security.declareProtected(ZEN_MANAGE_DEVICE, 'collectDevice') - def collectDevice(self, setlog=True, REQUEST=None, generateEvents=False, - background=False, write=None, debug=False): + security.declareProtected(ZEN_MANAGE_DEVICE, "collectDevice") + + def collectDevice( + self, + setlog=True, + REQUEST=None, + generateEvents=False, + background=False, + write=None, + debug=False, + ): """ Collect the configuration of this device AKA Model Device @@ -1794,22 +2070,35 @@ def collectDevice(self, setlog=True, REQUEST=None, generateEvents=False, xmlrpc = isXmlRpc(REQUEST) perfConf = self.getPerformanceServer() if perfConf is None: - msg = "Device %s in unknown state -- remove and remodel" % self.titleOrId() + msg = ( + "Device %s in unknown state -- remove and remodel" + % self.titleOrId() + ) if write is not None: write(msg) log.error("Unable to get collector info: %s", msg) - if xmlrpc: return 1 + if xmlrpc: + return 1 return - perfConf.collectDevice(self, setlog, REQUEST, generateEvents, - background, write, collectPlugins='', - debug=debug) + perfConf.collectDevice( + self, + setlog, + REQUEST, + generateEvents, + background, + write, + collectPlugins="", + debug=debug, + ) if REQUEST: - audit('UI.Device.Remodel', self) - if xmlrpc: return 0 + audit("UI.Device.Remodel", self) + if xmlrpc: + return 0 + + security.declareProtected(ZEN_MANAGE_DEVICE, "runDeviceMonitor") - security.declareProtected(ZEN_MANAGE_DEVICE, 'collectDevice') def runDeviceMonitor(self, REQUEST=None, write=None, debug=False): """ Run monitoring daemon agains the device ones @@ -1818,11 +2107,15 @@ def runDeviceMonitor(self, REQUEST=None, write=None, 
debug=False): xmlrpc = isXmlRpc(REQUEST) perfConf = self.getPerformanceServer() if perfConf is None: - msg = "Device %s in unknown state -- remove and remodel" % self.titleOrId() + msg = ( + "Device %s in unknown state -- remove and remodel" + % self.titleOrId() + ) if write is not None: write(msg) log.error("Unable to get collector info: %s", msg) - if xmlrpc: return 1 + if xmlrpc: + return 1 return # Getting all the datasources from template signed to that @@ -1834,7 +2127,7 @@ def runDeviceMonitor(self, REQUEST=None, write=None, debug=False): collection_daemons = set() for ds in datasources: if isPythonDataSource(ds): - daemon = _sourcetype_to_collector_map['Python'] + daemon = _sourcetype_to_collector_map["Python"] else: daemon = _sourcetype_to_collector_map.get(ds.sourcetype) if daemon: @@ -1844,54 +2137,111 @@ def runDeviceMonitor(self, REQUEST=None, write=None, debug=False): # zenpython; zenperfsnmp; zencommand if not collection_daemons: if write: - write('Monitoring through UI only support COMMAND, ' - 'SNMP and ZenPython type of datasources') - if xmlrpc: return 1 + write( + "Monitoring through UI only support COMMAND, " + "SNMP and ZenPython type of datasources" + ) + if xmlrpc: + return 1 return # Pass collection_daemons as a list because perfConf.runDeviceMonitor # was written expecting that parameter to be a list. perfConf.runDeviceMonitor( - self, REQUEST, write, list(collection_daemons), debug=debug, + self, + REQUEST, + write, + list(collection_daemons), + debug=debug, ) if REQUEST: - audit('UI.Device.Monitor', self) - if xmlrpc: return 0 + audit("UI.Device.Monitor", self) + if xmlrpc: + return 0 + + security.declareProtected(ZEN_MANAGE_DEVICE, "monitorPerDatasource") - security.declareProtected(ZEN_MANAGE_DEVICE, 'collectDevice') def monitorPerDatasource(self, dsObj, REQUEST=None, write=None): """ Run monitoring daemon against one device and one datasource ones """ - parameter = '--datasource' - value = '%s/%s' % (dsObj.rrdTemplate.obj.id, dsObj.id) + parameter = "--datasource" + value = "%s/%s" % (dsObj.rrdTemplate.obj.id, dsObj.id) if isPythonDataSource(dsObj): - collection_daemon = _sourcetype_to_collector_map['Python'] - elif dsObj.sourcetype == 'COMMAND': - collection_daemon = _sourcetype_to_collector_map['COMMAND'] - elif dsObj.sourcetype == 'SNMP': - collection_daemon = _sourcetype_to_collector_map['SNMP'] - parameter = '--oid' + collection_daemon = _sourcetype_to_collector_map["Python"] + elif dsObj.sourcetype == "COMMAND": + collection_daemon = _sourcetype_to_collector_map["COMMAND"] + elif dsObj.sourcetype == "SNMP": + collection_daemon = _sourcetype_to_collector_map["SNMP"] + parameter = "--oid" value = dsObj.oid else: - collection_daemon = '' + collection_daemon = "" xmlrpc = isXmlRpc(REQUEST) perfConf = self.getPerformanceServer() if not collection_daemon: if write: - write('Modeling through UI only support COMMAND, ' - 'SNMP and ZenPython type of datasources') - if xmlrpc: return 1 + write( + "Modeling through UI only support COMMAND, " + "SNMP and ZenPython type of datasources" + ) + if xmlrpc: + return 1 return - perfConf.runDeviceMonitorPerDatasource(self, REQUEST, write, - collection_daemon, parameter, - value) - if xmlrpc: return 0 - - security.declareProtected(ZEN_DELETE_DEVICE, 'deleteDevice') - def deleteDevice(self, deleteStatus=False, deleteHistory=False, - deletePerf=False, REQUEST=None): + perfConf.runDeviceMonitorPerDatasource( + self, REQUEST, write, collection_daemon, parameter, value + ) + if xmlrpc: + return 0 + + def _removeManageIp(self): + 
""" + Safely remove the manageIp object from the device. Remove the + ipaddress object from the device:ipaddress relation, and if the + ipaddress does not have any remaining relations, remove the + ipaddress object. + """ + deviceId = self.titleOrId() + manageIp = self.manageIp + ipaddr = self.ipaddress() + log.debug('Set manageIp on {} to empty string'.format(deviceId)) + if ipaddr: + ipaddrString = ipaddr.id + log.debug('Removing ipaddress/manageDevice relation from {} to {}'.format(deviceId, str(ipaddr))) + if ipaddrString != manageIp: + # Shouldn't happen, but manageIp is not the + # IP set on the device. + log.warn('Device {} has a mismatch between manageIp({})' + 'and device ip ({})'.format( + deviceId, manageIp, ipaddrString + ) + ) + # Remove the ip from the ipaddress relation on the device + self.ipaddress.removeRelation(ipaddr) + # removeIpAddresses will only remove IP addresses that are no longer + # attached to any device, so it's safe to call at this point + netFacade = getFacade('network', self.dmd) + ips = [ipaddr.getPrimaryId()] + log.debug('Removing IP address obj {} if no longer used'.format(ipaddrString)) + removeCount, errorCount = netFacade.removeIpAddresses(ips) + if errorCount: + # This most likely means that the IP address is still + # attached to a different device + log.warn('Could not remove ' + 'IP address {}'.format(ipaddrString)) + # Set the manageIp to blank + self.manageIp = '' + + security.declareProtected(ZEN_DELETE_DEVICE, "deleteDevice") + + def deleteDevice( + self, + deleteStatus=False, + deleteHistory=False, + deletePerf=False, + REQUEST=None, + ): """ Delete device from the database @@ -1907,25 +2257,37 @@ def deleteDevice(self, deleteStatus=False, deleteHistory=False, parent = self.getPrimaryParent() if deleteStatus: # Close events for this device - zep = getFacade('zep') - tagFilter = { 'tag_uuids': [IGlobalIdentifier(self).getGUID()] } - eventFilter = { 'tag_filter': [ tagFilter ] } + zep = getFacade("zep") + tagFilter = {"tag_uuids": [IGlobalIdentifier(self).getGUID()]} + eventFilter = {"tag_filter": [tagFilter]} log.debug("Closing events for device: %s", self.getId()) zep.closeEventSummaries(eventFilter=eventFilter) if REQUEST: - audit('UI.Device.Delete', self, deleteStatus=deleteStatus, - deleteHistory=deleteHistory, deletePerf=deletePerf) - self.getDmdRoot("Monitors").deletePreviousCollectorForDevice(self.getId()) - self.dmd.getDmdRoot("ZenLinkManager").remove_device_from_cache(self.getId()) + audit( + "UI.Device.Delete", + self, + deleteStatus=deleteStatus, + deleteHistory=deleteHistory, + deletePerf=deletePerf, + ) + self.getDmdRoot("Monitors").deletePreviousCollectorForDevice( + self.getId() + ) + self.dmd.getDmdRoot("ZenLinkManager").remove_device_from_cache( + self.getId() + ) + self._removeManageIp() parent._delObject(self.getId()) if REQUEST: - if parent.getId()=='devices': + if parent.getId() == "devices": parent = parent.getPrimaryParent() - REQUEST['RESPONSE'].redirect(parent.absolute_url_path() + - "/deviceOrganizerStatus" - '?message=Device deleted') + REQUEST["RESPONSE"].redirect( + parent.absolute_url_path() + "/deviceOrganizerStatus" + "?message=Device deleted" + ) + + security.declareProtected(ZEN_ADMIN_DEVICE, "renameDevice") - security.declareProtected(ZEN_ADMIN_DEVICE, 'renameDevice') def renameDevice(self, newId=None, REQUEST=None, retainGraphData=False): """ Rename device from the DMD. 
Disallow assignment of @@ -1956,17 +2318,18 @@ def renameDevice(self, newId=None, REQUEST=None, retainGraphData=False): newId = newId.strip() - if newId == '' or newId == oldId: + if newId == "" or newId == oldId: return path device = self.dmd.Devices.findDeviceByIdExact(newId) if device: raise DeviceExistsError( - 'Device already exists with id %s' % newId, device, + "Device already exists with id %s" % newId, + device, ) if REQUEST: - audit('UI.Device.ChangeId', self, id=newId) + audit("UI.Device.ChangeId", self, id=newId) # side effect: self.getId() will return newId after this call try: @@ -1995,25 +2358,29 @@ def reassociatePerfDataAfterRename(self, oldId, newId): self.dmd.JobManager.addJob( FacadeMethodJob, description=( - 'Reassociating performance data for device {} with ' - 'new ID {}'.format(oldId, newId) + "Reassociating performance data for device {} with " + "new ID {}".format(oldId, newId) ), kwargs=dict( - facadefqdn='Products.Zuul.facades.metricfacade.MetricFacade', - method='renameDevice', + facadefqdn="Products.Zuul.facades.metricfacade.MetricFacade", + method="renameDevice", oldId=oldId, - newId=newId - ) + newId=newId, + ), ) - security.declareProtected(ZEN_CHANGE_DEVICE, 'index_object') + security.declareProtected(ZEN_CHANGE_DEVICE, "index_object") + + @deprecated def index_object(self, idxs=None, noips=False): """ Override so ips get indexed on move. DEPRECATED """ pass - security.declareProtected(ZEN_CHANGE_DEVICE, 'unindex_object') + security.declareProtected(ZEN_CHANGE_DEVICE, "unindex_object") + + @deprecated def unindex_object(self): """ Override so ips get unindexed as well. DEPRECATED @@ -2033,7 +2400,12 @@ def getUserCommandEnvironment(self): """ environ = Commandable.getUserCommandEnvironment(self) context = self.primaryAq() - environ.update({'dev': context, 'device': context,}) + environ.update( + { + "dev": context, + "device": context, + } + ) return environ def getUrlForUserCommands(self): @@ -2041,7 +2413,7 @@ def getUrlForUserCommands(self): Returns a URL to redirect to after a command has executed used by Commandable """ - return self.getPrimaryUrlPath() + '/deviceManagement' + return self.getPrimaryUrlPath() + "/deviceManagement" def getHTMLEventSummary(self, severity=4): """ @@ -2050,28 +2422,41 @@ def getHTMLEventSummary(self, severity=4): html = [] html.append("") html.append("") + def evsummarycell(ev): - if ev[1]-ev[2]>=0: klass = '%s empty thin' % ev[0] - else: klass = '%s thin' % ev[0] + if ev[1] - ev[2] >= 0: + klass = "%s empty thin" % ev[0] + else: + klass = "%s thin" % ev[0] h = '' % ( - klass, ev[1], ev[2]) + klass, + ev[1], + ev[2], + ) return h + info = self.getEventSummary(severity) html += map(evsummarycell, info) - html.append('
%s/%s
') - return '\n'.join(html) + html.append("") + return "\n".join(html) def getDataForJSON(self, minSeverity=0): """ Returns data ready for serialization """ - url, classurl = map(urlquote, - (self.getDeviceUrl(), self.getDeviceClassPath())) + url, classurl = map( + urlquote, (self.getDeviceUrl(), self.getDeviceClassPath()) + ) id = '%s' % ( - url, self.titleOrId()) + url, + self.titleOrId(), + ) ip = self.getDeviceIp() if self.checkRemotePerm(ZEN_VIEW, self.deviceClass()): - path = '%s' % (classurl,classurl) + path = '%s' % ( + classurl, + classurl, + ) else: path = classurl prod = self.getProdState() @@ -2084,24 +2469,8 @@ def exportXmlHook(self, ofile, ignorerels): """ map(lambda o: o.exportXml(ofile, ignorerels), (self.hw, self.os)) - def zenPropertyOptions(self, propname): - """ - Returns a list of possible options for a given zProperty - """ - if propname == 'zCollectorPlugins': - from Products.DataCollector.Plugins import loadPlugins - return sorted(ldr.pluginName for ldr in loadPlugins(self.dmd)) - if propname == 'zCommandProtocol': - return ['ssh', 'telnet'] - if propname == 'zSnmpVer': - return ['v1', 'v2c', 'v3'] - if propname == 'zSnmpAuthType': - return ['', 'MD5', 'SHA'] - if propname == 'zSnmpPrivType': - return ['', 'DES', 'AES'] - return ManagedEntity.zenPropertyOptions(self, propname) + security.declareProtected(ZEN_MANAGE_DEVICE, "pushConfig") - security.declareProtected(ZEN_MANAGE_DEVICE, 'pushConfig') def pushConfig(self, REQUEST=None): """ This will result in a push of all the devices to live collectors @@ -2111,25 +2480,29 @@ def pushConfig(self, REQUEST=None): self._p_changed = True if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Changes Pushed', - 'Changes to %s pushed to collectors.' % self.id + "Changes Pushed", + "Changes to %s pushed to collectors." 
% self.id, ) - audit('UI.Device.PushChanges', self) + audit("UI.Device.PushChanges", self) return self.callZenScreen(REQUEST) - security.declareProtected(ZEN_EDIT_LOCAL_TEMPLATES, 'bindTemplates') + security.declareProtected(ZEN_EDIT_LOCAL_TEMPLATES, "bindTemplates") + def bindTemplates(self, ids=(), REQUEST=None): """ This will bind available templates to the zDeviceTemplates @permission: ZEN_EDIT_LOCAL_TEMPLATES """ - result = self.setZenProperty('zDeviceTemplates', ids, REQUEST) + result = self.setZenProperty("zDeviceTemplates", ids, REQUEST) if REQUEST: - audit('UI.Device.BindTemplates', self, templates=ids) + audit("UI.Device.BindTemplates", self, templates=ids) return result - security.declareProtected(ZEN_EDIT_LOCAL_TEMPLATES, 'removeZDeviceTemplates') + security.declareProtected( + ZEN_EDIT_LOCAL_TEMPLATES, "removeZDeviceTemplates" + ) + def removeZDeviceTemplates(self, REQUEST=None): """ Deletes the local zProperty, zDeviceTemplates @@ -2139,14 +2512,19 @@ def removeZDeviceTemplates(self, REQUEST=None): for id in self.zDeviceTemplates: self.removeLocalRRDTemplate(id) if REQUEST: - audit('UI.Device.RemoveLocalTemplate', self, template=id) - from Products.ZenRelations.ZenPropertyManager import ZenPropertyDoesNotExist + audit("UI.Device.RemoveLocalTemplate", self, template=id) + from Products.ZenRelations.ZenPropertyManager import ( + ZenPropertyDoesNotExist, + ) + try: - return self.deleteZenProperty('zDeviceTemplates', REQUEST) + return self.deleteZenProperty("zDeviceTemplates", REQUEST) except ZenPropertyDoesNotExist: - if REQUEST: return self.callZenScreen(REQUEST) + if REQUEST: + return self.callZenScreen(REQUEST) + + security.declareProtected(ZEN_EDIT_LOCAL_TEMPLATES, "addLocalTemplate") - security.declareProtected(ZEN_EDIT_LOCAL_TEMPLATES, 'addLocalTemplate') def addLocalTemplate(self, id, REQUEST=None): """ Create a local template on a device @@ -2154,15 +2532,20 @@ def addLocalTemplate(self, id, REQUEST=None): @permission: ZEN_EDIT_LOCAL_TEMPLATES """ from Products.ZenModel.RRDTemplate import manage_addRRDTemplate + manage_addRRDTemplate(self, id) - if id not in self.zDeviceTemplates: - self.bindTemplates(self.zDeviceTemplates+[id]) + if ( + id not in self.zDeviceTemplates + and not id.endswith("-replacement") + and not id.endswith("-addition") + ): + self.bindTemplates(self.zDeviceTemplates + [id]) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Local Template Added', - 'Added template %s to %s.' % (id, self.id) + "Local Template Added", + "Added template %s to %s." 
% (id, self.id), ) - audit('UI.Device.AddLocalTemplate', self, template=id) + audit("UI.Device.AddLocalTemplate", self, template=id) return self.callZenScreen(REQUEST) def getAvailableTemplates(self): @@ -2170,32 +2553,45 @@ def getAvailableTemplates(self): Returns all available templates for this device """ # All templates defined on this device are available - templates = self.objectValues('RRDTemplate') + templates = self.objectValues("RRDTemplate") # Any templates available to the class that aren't overridden locally # are also available device_template_ids = set(t.id for t in templates) - templates.extend(t for t in self.deviceClass().getRRDTemplates() - if t.id not in device_template_ids) - - # filter before sorting - templates = filter(lambda t: isinstance(self, t.getTargetPythonClass()), templates) + templates.extend( + t + for t in self.deviceClass().getRRDTemplates() + if t.id not in device_template_ids + ) + # Filter out any templates that have been 'replaced' + filteredTemplates = list(templates) + for t in templates: + tName = t.titleOrId() + if tName.endswith("-replacement") or tName.endswith("-addition"): + filteredTemplates.remove(t) + # filter for python class before sorting + templates = filter( + lambda t: isinstance(self, t.getTargetPythonClass()), + filteredTemplates, + ) return sorted(templates, key=lambda x: x.id.lower()) def getSnmpV3EngineId(self): - return self.getProperty('zSnmpEngineId') + return self.getProperty("zSnmpEngineId") def setSnmpV3EngineId(self, value): - self.setZenProperty('zSnmpEngineId', value) + self.setZenProperty("zSnmpEngineId", value) + + security.declareProtected(ZEN_VIEW, "getLinks") - security.declareProtected(ZEN_VIEW, 'getLinks') - def getLinks(self, OSI_layer='3'): + def getLinks(self, OSI_layer="3"): """ Returns all Links on this Device's interfaces @permission: ZEN_VIEW """ - if OSI_layer=='3': + if OSI_layer == "3": from Products.ZenUtils.NetworkTree import getDeviceNetworkLinks + for link in getDeviceNetworkLinks(self): yield link else: @@ -2203,17 +2599,21 @@ def getLinks(self, OSI_layer='3'): for link in iface.links.objectValuesGen(): yield link - security.declareProtected(ZEN_VIEW, 'getXMLEdges') + security.declareProtected(ZEN_VIEW, "getXMLEdges") + def getXMLEdges(self, depth=3, filter="/", start=()): """ Gets XML """ - if not start: start=self.id - edges = NetworkTree.get_edges(self, depth, - withIcons=True, filter=filter) + if not start: + start = self.id + edges = NetworkTree.get_edges( + self, depth, withIcons=True, filter=filter + ) return edgesToXML(edges, start) - security.declareProtected(ZEN_VIEW, 'getPrettyLink') + security.declareProtected(ZEN_VIEW, "getPrettyLink") + @unpublished def getPrettyLink(self, target=None, altHref=""): """ @@ -2222,9 +2622,11 @@ def getPrettyLink(self, target=None, altHref=""): @rtype: HTML text @permission: ZEN_VIEW """ - template = ("
" - " " - "
%s") + template = ( + "
" + " " + "
%s" + ) icon = self.getIconPath() href = altHref if altHref else self.getPrimaryUrlPath() name = self.titleOrId() @@ -2234,8 +2636,11 @@ def getPrettyLink(self, target=None, altHref=""): if not self.checkRemotePerm(ZEN_VIEW, self): return rendered else: - return "%s" % \ - ('target=' + target if target else '', href, rendered) + return "%s" % ( + "target=" + target if target else "", + href, + rendered, + ) def osProcessClassMatchData(self): """ @@ -2244,14 +2649,16 @@ def osProcessClassMatchData(self): """ matchers = [] for pc in self.getDmdRoot("Processes").getSubOSProcessClassesSorted(): - matchers.append({ - 'includeRegex': pc.includeRegex, - 'excludeRegex': pc.excludeRegex, - 'replaceRegex': pc.replaceRegex, - 'replacement': pc.replacement, - 'primaryUrlPath': pc.getPrimaryUrlPath(), - 'primaryDmdId': pc.getPrimaryDmdId(), - }) + matchers.append( + { + "includeRegex": pc.includeRegex, + "excludeRegex": pc.excludeRegex, + "replaceRegex": pc.replaceRegex, + "replacement": pc.replacement, + "primaryUrlPath": pc.getPrimaryUrlPath(), + "primaryDmdId": pc.getPrimaryDmdId(), + } + ) return matchers @@ -2261,6 +2668,7 @@ def manageIpVersion(self): of the manageIp ip adddress """ from ipaddr import IPAddress + try: ip = self.getManageIp() return IPAddress(ip).version @@ -2272,8 +2680,9 @@ def manageIpVersion(self): def snmpwalkPrefix(self): """ - This method gets the ip address prefix used for this device when running - snmpwalk. + This method gets the ip address prefix used for this device + when running snmpwalk. + @rtype: string @return: Prefix used for snmwalk for this device """ @@ -2297,7 +2706,8 @@ def tracerouteCommand(self): Used by the user commands this returns which traceroute command this device should use. @rtype: string - @return "traceroute" or "traceroute6" depending on if the manageIp is ipv6 or not + @return "traceroute" or "traceroute6" depending on if the + manageIp is ipv6 or not. """ if self.manageIpVersion() == 6: return "traceroute6" @@ -2313,17 +2723,22 @@ def getStatus(self, statusclass=None, **kwargs): if statusclass is None: statusclass = self.zStatusEventClass - zep = getFacade('zep', self) + zep = getFacade("zep", self) try: event_filter = zep.createEventFilter( tags=[self.getUUID()], element_sub_identifier=[""], severity=[SEVERITY_CRITICAL], - status=[STATUS_NEW, STATUS_ACKNOWLEDGED, STATUS_SUPPRESSED], - event_class=filter(None, [self.zStatusEventClass])) + status=[ + STATUS_NEW, + STATUS_ACKNOWLEDGED, + STATUS_SUPPRESSED, + ], + event_class=filter(None, [self.zStatusEventClass]), + ) result = zep.getEventSummaries(0, filter=event_filter, limit=0) - return int(result['total']) + return int(result["total"]) except Exception: return None @@ -2336,32 +2751,38 @@ def _getPingStatus(self, statusclass): if not self.zPingMonitorIgnore and self.getManageIp(): # Override normal behavior - we only care if the manage IP is down - # need to add the ipinterface component id to search since we may be - # pinging interfaces and only care about status of the one that + # Need to add the ipinterface component id to search since we may + # be pinging interfaces and only care about status of the one that # matches the manage ip. 
This is potentially expensive element_sub_identifier = [""] - ifaces = self.getDeviceComponents(type='IpInterface') + ifaces = self.getDeviceComponents(type="IpInterface") for iface in ifaces: - if self.manageIp in [ip.partition("/")[0] for ip in iface.getIpAddresses()]: + if self.manageIp in [ + ip.partition("/")[0] for ip in iface.getIpAddresses() + ]: element_sub_identifier.append(iface.id) break - zep = getFacade('zep', self) - event_filter = zep.createEventFilter(tags=[self.getUUID()], - severity=[SEVERITY_WARNING,SEVERITY_ERROR,SEVERITY_CRITICAL], - status=[STATUS_NEW,STATUS_ACKNOWLEDGED, STATUS_SUPPRESSED], - element_sub_identifier=element_sub_identifier, - event_class=filter(None, [statusclass]), - details={EventProxy.DEVICE_IP_ADDRESS_DETAIL_KEY: self.getManageIp()}) + zep = getFacade("zep", self) + event_filter = zep.createEventFilter( + tags=[self.getUUID()], + severity=[SEVERITY_WARNING, SEVERITY_ERROR, SEVERITY_CRITICAL], + status=[STATUS_NEW, STATUS_ACKNOWLEDGED, STATUS_SUPPRESSED], + element_sub_identifier=element_sub_identifier, + event_class=filter(None, [statusclass]), + details={ + EventProxy.DEVICE_IP_ADDRESS_DETAIL_KEY: self.getManageIp() + }, + ) result = zep.getEventSummaries(0, filter=event_filter, limit=0) - return int(result['total']) + return int(result["total"]) else: return None def ipAddressAsInt(self): ip = self.getManageIp() if ip: - ip = ip.partition('/')[0] + ip = ip.partition("/")[0] if ip: return str(numbip(ip)) diff --git a/Products/ZenModel/DeviceClass.py b/Products/ZenModel/DeviceClass.py index 8df023379b..418fb9c2b4 100644 --- a/Products/ZenModel/DeviceClass.py +++ b/Products/ZenModel/DeviceClass.py @@ -832,29 +832,6 @@ def buildDeviceTreeProperties(self): if not devs.hasProperty(id): devs._setProperty(id, defaultValue, type) - def zenPropertyOptions(self, propname): - """ - Provide a set of default options for a zProperty - - @param propname: zProperty name - @type propname: string - @return: list of zProperty options - @rtype: list - """ - if propname == 'zCollectorPlugins': - from Products.DataCollector.Plugins import loadPlugins - return sorted(ldr.pluginName for ldr in loadPlugins(self.dmd)) - if propname == 'zCommandProtocol': - return ['ssh', 'telnet'] - if propname == 'zSnmpVer': - return ['v1', 'v2c', 'v3'] - if propname == 'zSnmpAuthType': - return ['', 'MD5', 'SHA'] - if propname == 'zSnmpPrivType': - return ['', 'DES', 'AES'] - return DeviceOrganizer.zenPropertyOptions(self, propname) - - def pushConfig(self, REQUEST=None): """ This will result in a push of all the devices to live collectors diff --git a/Products/ZenModel/DeviceHW.py b/Products/ZenModel/DeviceHW.py index c64c876cb5..f4b566e5cb 100644 --- a/Products/ZenModel/DeviceHW.py +++ b/Products/ZenModel/DeviceHW.py @@ -25,7 +25,7 @@ class DeviceHW(Hardware): meta_type = "DeviceHW" - totalMemory = 0L + totalMemory = 0 _properties = Hardware._properties + ( {'id':'totalMemory', 'type':'long', 'mode':'w'}, diff --git a/Products/ZenModel/FileSystem.py b/Products/ZenModel/FileSystem.py index 705fd83181..304504ba05 100644 --- a/Products/ZenModel/FileSystem.py +++ b/Products/ZenModel/FileSystem.py @@ -57,8 +57,8 @@ class FileSystem(OSComponent): storageDevice = "" type = "" blockSize = 0 - totalBlocks = 0L - totalFiles = 0L + totalBlocks = 0 + totalFiles = 0 capacity = 0 inodeCapacity = 0 maxNameLen = 0 diff --git a/Products/ZenModel/IpAddress.py b/Products/ZenModel/IpAddress.py index ae04197dc7..1379dfcbd0 100644 --- a/Products/ZenModel/IpAddress.py +++ b/Products/ZenModel/IpAddress.py @@ 
-104,6 +104,11 @@ def __init__(self, id, netmask=24): self.title = ipunwrap(id) self.version = ipobj.version + def _pre_remove(self): + self.manageDevice.removeRelation() + self.interface.removeRelation() + self.clientroutes.removeRelation() + def setPtrName(self): try: data = socket.gethostbyaddr(ipunwrap(self.id)) @@ -284,6 +289,8 @@ def get_indexable_peers(self): peers = [] if self.device(): peers.append(self.device().primaryAq()) + if self.manageDevice(): + peers.append(self.manageDevice().primaryAq()) if self.interface(): peers.append(self.interface().primaryAq()) return peers diff --git a/Products/ZenModel/IpInterface.py b/Products/ZenModel/IpInterface.py index 977dfd4d5f..8a3f9180cc 100644 --- a/Products/ZenModel/IpInterface.py +++ b/Products/ZenModel/IpInterface.py @@ -207,6 +207,11 @@ def before_object_deleted_handler(self): notify(IndexingEvent(device, idxs=('macAddresses',), update_metadata=False)) except KeyError: pass + if device._operation != 1: + if self.ipaddresses(): + self.ipaddresses.removeRelation() + if self.iproutes(): + self.iproutes.removeRelation() def object_added_handler(self): self._update_device_macs(self.device(), self.macaddress) @@ -574,9 +579,25 @@ def getRRDTemplates(self): order.insert(0, templateName) for name in order: + templates = [] template = self.getRRDTemplateByName(name) - if template: - return [template] + if not template: + continue + replacement = self.getRRDTemplateByName( + '{}-replacement'.format(name)) + + if replacement and replacement not in templates: + templates.append(replacement) + else: + templates.append(template) + + addition = self.getRRDTemplateByName( + '{}-addition'.format(name)) + + if addition and addition not in templates: + templates.append(addition) + if templates: + return templates return [] diff --git a/Products/ZenModel/IpNetwork.py b/Products/ZenModel/IpNetwork.py index 0809a197b7..653752cb84 100644 --- a/Products/ZenModel/IpNetwork.py +++ b/Products/ZenModel/IpNetwork.py @@ -33,6 +33,7 @@ from Products.ZenModel.ZenossSecurity import * from Products.ZenModel.interfaces import IObjectEventsSubscriber +from Products.Jobber.zenjobs import app from Products.ZenUtils.IpUtil import * from Products.ZenRelations.RelSchema import * from IpAddress import IpAddress @@ -624,7 +625,7 @@ def primarySortKey(self): >>> net = dmd.Networks.addSubNetwork('1.2.3.0', 24) >>> net.primarySortKey() - 16909056L + 16909056 """ return numbip(self.id) @@ -652,6 +653,12 @@ def addSubNetwork(self, ip, netmask=24): lambda n: isinstance(n, IpAddress) and n.interface(), netobj.getSubObjects()) for i in ips: i.interface().ipaddresses._setObject(i.id, i) + mips = filter( + lambda n: isinstance(n, IpAddress) and n.manageDevice(), + netobj.getSubObjects(), + ) + for i in mips: + i.manageDevice().ipaddress.addRelation(i) return self.getSubNetwork(ip) @@ -933,7 +940,8 @@ class AutoDiscoveryJob(SubprocessJob): specifying IP ranges, not both. Also accepts a set of zProperties to be set on devices that are discovered. 
""" - def _run(self, nets=(), ranges=(), zProperties=(), collector='localhost'): + name = 'AutoDiscoveryJob' + def _run(self, nets=(), ranges=(), zProperties={}, collector='localhost'): # Store the nets and ranges self.nets = nets self.ranges = ranges @@ -967,6 +975,8 @@ def _run(self, nets=(), ranges=(), zProperties=(), collector='localhost'): SubprocessJob._run(self, cmd) +app.register_task(AutoDiscoveryJob) + class IpNetworkPrinter(object): def __init__(self, out): diff --git a/Products/ZenModel/IpRouteEntry.py b/Products/ZenModel/IpRouteEntry.py index c26a2f430f..e90fafe2dc 100644 --- a/Products/ZenModel/IpRouteEntry.py +++ b/Products/ZenModel/IpRouteEntry.py @@ -22,6 +22,8 @@ from Products.ZenUtils.Utils import localIpCheck, prepId from Products.ZenRelations.RelSchema import * +from Products.ZenModel.interfaces import IObjectEventsSubscriber +from zope.interface import implements from OSComponent import OSComponent @@ -61,6 +63,8 @@ class IpRouteEntry(OSComponent): """ IpRouteEntry object """ + + implements(IObjectEventsSubscriber) meta_type = 'IpRouteEntry' @@ -106,6 +110,16 @@ class IpRouteEntry(OSComponent): security = ClassSecurityInfo() ipcheck = re.compile(r'^127\.|^0\.0\.|^169\.254\.|^224\.|^::1$|^fe80:|^ff').search + + def before_object_deleted_handler(self): + device = self.device() + if device and device._operation != 1: + if self.interface(): + self.interface.removeRelation() + if self.nexthop(): + self.nexthop.removeRelation() + if self.target(): + self.target.removeRelation() def __getattr__(self, name): """ diff --git a/Products/ZenModel/MaintenanceWindow.py b/Products/ZenModel/MaintenanceWindow.py index dba95f7588..afe09c35b5 100644 --- a/Products/ZenModel/MaintenanceWindow.py +++ b/Products/ZenModel/MaintenanceWindow.py @@ -176,7 +176,7 @@ def niceStartDate(self): def niceStartDateTime(self): "Return start time as a string with nice sort qualities" - return "%s %s" % (Time.convertTimestampToTimeZone(self.start, self.timezone), self.timezone) + return "%s %s" % (datetime.fromtimestamp(self.start, self.tzInstance).strftime("%Y/%m/%d %H:%M:%S"), self.timezone) def niceStartProductionState(self): "Return a string version of the startProductionState" diff --git a/Products/ZenModel/MetricMixin.py b/Products/ZenModel/MetricMixin.py index a5f8401be9..faea727a6b 100644 --- a/Products/ZenModel/MetricMixin.py +++ b/Products/ZenModel/MetricMixin.py @@ -139,10 +139,20 @@ def snmpIgnore(self): return False def getRRDTemplates(self): - default = self.getRRDTemplateByName(self.getRRDTemplateName()) + templates = [] + defaultName = self.getRRDTemplateName() + default = self.getRRDTemplateByName(defaultName) if not default: return [] - return [default] + replacement = self.getRRDTemplateByName('{}-replacement'.format(defaultName)) + if replacement: + templates.append(replacement) + else: + templates.append(default) + addition = self.getRRDTemplateByName('{}-addition'.format(defaultName)) + if addition: + templates.append(addition) + return templates def getRRDTemplate(self): try: @@ -335,7 +345,6 @@ def fetchRRDValues(self, dpnames, cf, resolution, start, end="now"): results = [] if isinstance(dpnames, basestring): dpnames = [dpnames] - facade = getFacade('metric', self.dmd) # parse start and end into unix timestamps start, end = self._rrdAtTimeToUnix(start, end) diff --git a/Products/ZenModel/MibBase.py b/Products/ZenModel/MibBase.py index 32123b02dd..4f7e20e794 100644 --- a/Products/ZenModel/MibBase.py +++ b/Products/ZenModel/MibBase.py @@ -1,22 +1,24 @@ 
############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## +from __future__ import absolute_import -from zope.interface import implements +from zope.interface import implementer -from Products.ZenModel.interfaces import IIndexed -from ZenModelRM import ZenModelRM -from ZenPackable import ZenPackable +from .interfaces import IIndexed +from .ZenModelRM import ZenModelRM +from .ZenPackable import ZenPackable + +@implementer(IIndexed) class MibBase(ZenModelRM, ZenPackable): - implements(IIndexed) - default_catalog = 'mibSearch' + default_catalog = "mibSearch" _relations = ZenPackable._relations[:] @@ -27,29 +29,28 @@ class MibBase(ZenModelRM, ZenPackable): description = "" _properties = ( - {'id':'moduleName', 'type':'string', 'mode':'w'}, - {'id':'nodetype', 'type':'string', 'mode':'w'}, - {'id':'oid', 'type':'string', 'mode':'w'}, - {'id':'status', 'type':'string', 'mode':'w'}, - {'id':'description', 'type':'string', 'mode':'w'}, + {"id": "moduleName", "type": "string", "mode": "w"}, + {"id": "nodetype", "type": "string", "mode": "w"}, + {"id": "oid", "type": "string", "mode": "w"}, + {"id": "status", "type": "string", "mode": "w"}, + {"id": "description", "type": "string", "mode": "w"}, ) - def __init__(self, id, title="", **kwargs): super(ZenModelRM, self).__init__(id, title) atts = self.propertyIds() for key, val in kwargs.items(): - if key in atts: setattr(self, key, val) - + if key in atts: + setattr(self, key, val) def getFullName(self): - """Return full value name in form MODULE::attribute. - """ + """Return full value name in form MODULE::attribute.""" return "%s::%s" % (self.moduleName, self.id) - def summary(self): - """Return summary string for Mib objects. 
- """ - return [str(getattr(self, p)) for p in self.propertyIds() \ - if str(getattr(self, p))] + """Return summary string for Mib objects.""" + return [ + str(getattr(self, p)) + for p in self.propertyIds() + if str(getattr(self, p)) + ] diff --git a/Products/ZenModel/MibModule.py b/Products/ZenModel/MibModule.py index 22ada4ded2..4d3aedb353 100644 --- a/Products/ZenModel/MibModule.py +++ b/Products/ZenModel/MibModule.py @@ -7,19 +7,21 @@ # ############################################################################## +from __future__ import absolute_import +from AccessControl import ClassSecurityInfo, Permissions from AccessControl.class_init import InitializeClass -from AccessControl import ClassSecurityInfo -from AccessControl import Permissions -from zope.interface import implements from zExceptions import BadRequest +from zope.interface import implementer from Products.ZenRelations.RelSchema import ToOne, ToManyCont from Products.ZenWidgets import messaging -from Products.ZenModel.interfaces import IIndexed -from ZenModelRM import ZenModelRM -from ZenPackable import ZenPackable +from .interfaces import IIndexed +from .MibNode import MibNode +from .MibNotification import MibNotification +from .ZenModelRM import ZenModelRM +from .ZenPackable import ZenPackable def createOID(dmd, container, new_node, logger=None): @@ -33,7 +35,9 @@ def createOID(dmd, container, new_node, logger=None): logger.warn( "OID '%s' will be removed from organizer '%s' " "and added to organizer '%s'.", - new_node.oid, old_node.moduleName, new_node.moduleName + new_node.oid, + old_node.moduleName, + new_node.moduleName, ) old_node.getParentNode()._delObject(old_node.id) try: @@ -44,44 +48,52 @@ def createOID(dmd, container, new_node, logger=None): return container._getOb(new_node.id) +@implementer(IIndexed) class MibModule(ZenModelRM, ZenPackable): - - implements(IIndexed) - types = ('COUNTER', 'GAUGE', 'DERIVE', 'ABSOLUTE') + types = ("COUNTER", "GAUGE", "DERIVE", "ABSOLUTE") language = "" contact = "" description = "" _properties = ( - {'id': 'language', 'type': 'string', 'mode': 'w'}, - {'id': 'contact', 'type': 'string', 'mode': 'w'}, - {'id': 'description', 'type': 'string', 'mode': 'w'}, + {"id": "language", "type": "string", "mode": "w"}, + {"id": "contact", "type": "string", "mode": "w"}, + {"id": "description", "type": "string", "mode": "w"}, ) _relations = ZenPackable._relations + ( - ("miborganizer", ToOne(ToManyCont, "Products.ZenModel.MibOrganizer", "mibs")), + ( + "miborganizer", + ToOne(ToManyCont, "Products.ZenModel.MibOrganizer", "mibs"), + ), ("nodes", ToManyCont(ToOne, "Products.ZenModel.MibNode", "module")), - ("notifications", ToManyCont(ToOne, "Products.ZenModel.MibNotification", "module")), + ( + "notifications", + ToManyCont(ToOne, "Products.ZenModel.MibNotification", "module"), + ), ) # Screen action bindings (and tab definitions) - factory_type_information = ({ - 'immediate_view' : 'viewMibModule', - 'actions' : - ( - { 'id' : 'overview' - , 'name' : 'Overview' - , 'action' : 'viewMibModule' - , 'permissions' : (Permissions.view,) - }, - { 'id' : 'edit' - , 'name' : 'Edit' - , 'action' : 'editMibModule' - , 'permissions' : (Permissions.view,) - }, - ) - },) + factory_type_information = ( + { + "immediate_view": "viewMibModule", + "actions": ( + { + "id": "overview", + "name": "Overview", + "action": "viewMibModule", + "permissions": (Permissions.view,), + }, + { + "id": "edit", + "name": "Edit", + "action": "editMibModule", + "permissions": (Permissions.view,), + }, + ), + }, + ) security 
= ClassSecurityInfo() @@ -94,87 +106,84 @@ def nodeCount(self): def notificationCount(self): return self.notifications.countObjects() - def deleteMibNodes(self, ids=[], REQUEST=None): - """Delete MibNodes + def deleteMibNodes(self, ids, REQUEST=None): + """ + Delete MibNodes + + @type ids: Sequence[str] """ for node in self.nodes(): - id = getattr(node, 'id', None) + id = getattr(node, "id", None) if id in ids: self.nodes._delObject(id) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Mappings Deleted', - 'Mib nodes deleted: %s' % (', '.join(ids)) + "Mappings Deleted", "Mib nodes deleted: %s" % (", ".join(ids)) ) return self.callZenScreen(REQUEST) def addMibNode(self, id, oid, nodetype, REQUEST=None): - """Add a MibNode - """ + """Add a MibNode""" node = self.createMibNode( id, oid=oid, nodetype=nodetype, moduleName=self.id ) if REQUEST: if node: messaging.IMessageSender(self).sendToBrowser( - 'Mib Node Added', - 'Node %s was created with oid %s.' % (id, oid) + "Mib Node Added", + "Node %s was created with oid %s." % (id, oid), ) else: messaging.IMessageSender(self).sendToBrowser( - 'Invalid OID', - 'OID %s is invalid.' % oid, - priority=messaging.WARNING + "Invalid OID", + "OID %s is invalid." % oid, + priority=messaging.WARNING, ) return self.callZenScreen(REQUEST) def createMibNode(self, id, logger=None, **kwargs): - """Create a MibNotification - """ - from MibNode import MibNode + """Create a MibNotification""" return createOID(self.dmd, self.nodes, MibNode(id, **kwargs), logger) - def deleteMibNotifications(self, ids=[], REQUEST=None): - """Delete MibNotifications + def deleteMibNotifications(self, ids, REQUEST=None): + """ + Delete MibNotifications + + @type ids: Sequence[str] """ for notification in self.notifications(): - id = getattr(notification, 'id', None) + id = getattr(notification, "id", None) if id in ids: self.notifications._delObject(id) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Traps Deleted', - 'Traps deleted: %s' % (', '.join(ids)) + "Traps Deleted", "Traps deleted: %s" % (", ".join(ids)) ) return self.callZenScreen(REQUEST) def addMibNotification(self, id, oid, nodetype, REQUEST=None): - """Add a MibNotification - """ + """Add a MibNotification""" notification = self.createMibNotification( id, oid=oid, nodetype=nodetype, moduleName=self.id ) if REQUEST: if notification: messaging.IMessageSender(self).sendToBrowser( - 'Trap added', - 'Trap %s was created with oid %s.' % (id, oid) + "Trap added", + "Trap %s was created with oid %s." % (id, oid), ) else: messaging.IMessageSender(self).sendToBrowser( - 'Invalid OID', - 'OID %s is invalid.' % oid, - priority=messaging.WARNING + "Invalid OID", + "OID %s is invalid." % oid, + priority=messaging.WARNING, ) return self.callZenScreen(REQUEST) def createMibNotification(self, id, logger=None, **kwargs): - """Create a MibNotification - """ - from MibNotification import MibNotification + """Create a MibNotification""" return createOID( - self.dmd, self.notifications, - MibNotification(id, **kwargs), logger + self.dmd, self.notifications, MibNotification(id, **kwargs), logger ) diff --git a/Products/ZenModel/MibNode.py b/Products/ZenModel/MibNode.py index 7de83099c1..0585690ba8 100644 --- a/Products/ZenModel/MibNode.py +++ b/Products/ZenModel/MibNode.py @@ -1,44 +1,43 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. 
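
# The deleteMibNodes/deleteMibNotifications signatures above drop the
# ids=[] default: a mutable default is created once and shared across
# calls.  A minimal, standalone sketch of the pitfall and the usual fix
# (hypothetical helpers, not part of MibModule):
def _collect(value, seen=[]):         # one list object shared by every call
    seen.append(value)
    return seen

def _collect_safe(value, seen=None):  # fresh list per call
    if seen is None:
        seen = []
    seen.append(value)
    return seen

assert _collect(1) == [1]
assert _collect(2) == [1, 2]          # state leaked from the first call
assert _collect_safe(1) == [1]
assert _collect_safe(2) == [2]
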
-# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## +from __future__ import absolute_import from AccessControl import Permissions -from Products.ZenModel.ZenossSecurity import * -from Products.ZenRelations.RelSchema import * +from Products.ZenRelations.RelSchema import ToManyCont, ToOne -from MibBase import MibBase +from .MibBase import MibBase -class MibNode(MibBase): - #syntax = "" +class MibNode(MibBase): access = "" _properties = MibBase._properties + ( - {'id':'access', 'type':'string', 'mode':'w'}, + {"id": "access", "type": "string", "mode": "w"}, ) _relations = MibBase._relations + ( ("module", ToOne(ToManyCont, "Products.ZenModel.MibModule", "nodes")), ) - + # Screen action bindings (and tab definitions) - factory_type_information = ( - { - 'immediate_view' : 'viewMibNode', - 'actions' : - ( - { 'id' : 'overview' - , 'name' : 'Overview' - , 'action' : 'viewMibNode' - , 'permissions' : ( Permissions.view, ) + factory_type_information = ( + { + "immediate_view": "viewMibNode", + "actions": ( + { + "id": "overview", + "name": "Overview", + "action": "viewMibNode", + "permissions": (Permissions.view,), }, - ) - }, - ) + ), + }, + ) diff --git a/Products/ZenModel/MibNotification.py b/Products/ZenModel/MibNotification.py index 47dd367a32..f86f81d542 100644 --- a/Products/ZenModel/MibNotification.py +++ b/Products/ZenModel/MibNotification.py @@ -1,44 +1,46 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## +from __future__ import absolute_import from AccessControl import Permissions -from Products.ZenModel.ZenossSecurity import * -from Products.ZenRelations.RelSchema import * +from Products.ZenRelations.RelSchema import ToManyCont, ToOne -from MibBase import MibBase +from .MibBase import MibBase -class MibNotification(MibBase): +class MibNotification(MibBase): objects = [] - - + _properties = MibBase._properties + ( - {'id':'objects', 'type':'lines', 'mode':'w'}, + {"id": "objects", "type": "lines", "mode": "w"}, ) - + _relations = MibBase._relations + ( - ("module", ToOne(ToManyCont, "Products.ZenModel.MibModule", "notifications")), + ( + "module", + ToOne(ToManyCont, "Products.ZenModel.MibModule", "notifications"), + ), ) - + # Screen action bindings (and tab definitions) - factory_type_information = ( - { - 'immediate_view' : 'viewMibNotification', - 'actions' : - ( - { 'id' : 'overview' - , 'name' : 'Overview' - , 'action' : 'viewMibNotification' - , 'permissions' : ( Permissions.view, ) + factory_type_information = ( + { + "immediate_view": "viewMibNotification", + "actions": ( + { + "id": "overview", + "name": "Overview", + "action": "viewMibNotification", + "permissions": (Permissions.view,), }, - ) - }, - ) + ), + }, + ) diff --git a/Products/ZenModel/MibOrganizer.py b/Products/ZenModel/MibOrganizer.py index 0f4f20c691..265d9308d1 100644 --- a/Products/ZenModel/MibOrganizer.py +++ b/Products/ZenModel/MibOrganizer.py @@ -7,26 +7,29 @@ # ############################################################################## +from __future__ import absolute_import + import logging -from App.special_dtml import DTMLFile +import six + +from AccessControl import ClassSecurityInfo, Permissions from AccessControl.class_init import InitializeClass -from AccessControl import ClassSecurityInfo -from AccessControl import Permissions +from App.special_dtml import DTMLFile from Products.Jobber.jobs import SubprocessJob -from Products.ZenModel.ZenossSecurity import ZEN_MANAGE_DMD, ZEN_ADD from Products.ZenRelations.RelSchema import ToOne, ToManyCont from Products.ZenUtils.Search import makeCaseInsensitiveKeywordIndex from Products.ZenWidgets import messaging from Products.ZenUtils.Utils import atomicWrite, binPath, zenPath -from Organizer import Organizer -from MibModule import MibModule -from ZenPackable import ZenPackable +from .MibModule import MibModule +from .Organizer import Organizer +from .ZenossSecurity import ZEN_MANAGE_DMD, ZEN_ADD +from .ZenPackable import ZenPackable -log = logging.getLogger('zen.Mibs') -_pathToMIB = '/var/ext/uploadedMIBs' +log = logging.getLogger("zen.Mibs") +_pathToMIB = "/var/ext/uploadedMIBs" def manage_addMibOrganizer(context, id, REQUEST=None): @@ -35,12 +38,12 @@ def manage_addMibOrganizer(context, id, REQUEST=None): context._setObject(id, sc) sc = context._getOb(id) if REQUEST is not None: - REQUEST['RESPONSE'].redirect( - context.absolute_url_path() + '/manage_main' + REQUEST["RESPONSE"].redirect( + context.absolute_url_path() + "/manage_main" ) -addMibOrganizer = DTMLFile('dtml/addMibOrganizer', globals()) +addMibOrganizer = DTMLFile("dtml/addMibOrganizer", globals()) def _oid2name(mibSearch, oid, exactMatch=True, strip=False): @@ -48,7 +51,7 @@ def _oid2name(mibSearch, oid, exactMatch=True, strip=False): MibOrganizer class and takes mibSearch as a parameter to make it easier to unit test. 
""" - oid = oid.strip('.') + oid = oid.strip(".") if exactMatch: brains = mibSearch(oid=oid) @@ -57,13 +60,13 @@ def _oid2name(mibSearch, oid, exactMatch=True, strip=False): else: return "" - oidlist = oid.split('.') + oidlist = oid.split(".") for i in range(len(oidlist), 0, -1): - brains = mibSearch(oid='.'.join(oidlist[:i])) + brains = mibSearch(oid=".".join(oidlist[:i])) if len(brains) < 1: continue if len(oidlist[i:]) > 0 and not strip: - return "%s.%s" % (brains[0].id, '.'.join(oidlist[i:])) + return "%s.%s" % (brains[0].id, ".".join(oidlist[i:])) else: return brains[0].id return "" @@ -72,30 +75,41 @@ def _oid2name(mibSearch, oid, exactMatch=True, strip=False): class MibOrganizer(Organizer, ZenPackable): meta_type = "MibOrganizer" dmdRootName = "Mibs" - default_catalog = 'mibSearch' + default_catalog = "mibSearch" security = ClassSecurityInfo() - _relations = Organizer._relations + ZenPackable._relations + ( - ("mibs", ToManyCont(ToOne,"Products.ZenModel.MibModule","miborganizer")), + _relations = ( + Organizer._relations + + ZenPackable._relations + + ( + ( + "mibs", + ToManyCont( + ToOne, "Products.ZenModel.MibModule", "miborganizer" + ), + ), + ) ) # Screen action bindings (and tab definitions) - factory_type_information = ({ - 'immediate_view' : 'mibOrganizerOverview', - 'actions' : - ( - { 'id' : 'overview' - , 'name' : 'Overview' - , 'action' : 'mibOrganizerOverview' - , 'permissions' : (Permissions.view,) - }, - ) - },) + factory_type_information = ( + { + "immediate_view": "mibOrganizerOverview", + "actions": ( + { + "id": "overview", + "name": "Overview", + "action": "mibOrganizerOverview", + "permissions": (Permissions.view,), + }, + ), + }, + ) def __init__( - self, id=None, description=None, text=None, - content_type='text/html'): + self, id=None, description=None, text=None, content_type="text/html" + ): if not id: id = self.dmdRootName super(MibOrganizer, self).__init__(id, description) @@ -124,22 +138,20 @@ def name2oid(self, name): """ Return an oid based on a name in the form MIB::name. """ - brains = self.getDmdRoot("Mibs").mibSearch({'id': name}) + brains = self.getDmdRoot("Mibs").mibSearch({"id": name}) if len(brains) > 0: return brains[0].oid - return '' + return "" def countClasses(self): - """Count all mibs with in a MibOrganizer. - """ + """Count all mibs with in a MibOrganizer.""" count = self.mibs.countObjects() for group in self.children(): count += group.countClasses() return count def createMibModule(self, name, path="/"): - """Create a MibModule - """ + """Create a MibModule""" mibs = self.getDmdRoot(self.dmdRootName) mod = None if not mod: @@ -150,39 +162,35 @@ def createMibModule(self, name, path="/"): return mod def manage_addMibModule(self, id, REQUEST=None): - """Create a new service class in this Organizer. - """ + """Create a new service class in this Organizer.""" mm = MibModule(id) self.mibs._setObject(id, mm) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Mib Module Created', - 'Mib module %s was created.' % id + "Mib Module Created", "Mib module %s was created." % id ) return self.callZenScreen(REQUEST) else: return self.mibs._getOb(id) def removeMibModules(self, ids=None, REQUEST=None): - """Remove MibModules from an EventClass. 
- """ + """Remove MibModules from an EventClass.""" if not ids: return self() - if isinstance(ids, basestring): + if isinstance(ids, six.string_types): ids = (ids,) for id in ids: self.mibs._delObject(id) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Mib Module Deleted', - 'Mib modules deleted: %s' % ', '.join(ids) + "Mib Module Deleted", + "Mib modules deleted: %s" % ", ".join(ids), ) return self() def moveMibModules(self, moveTarget, ids=None, REQUEST=None): - """Move MibModules from this organizer to moveTarget. - """ - if isinstance(ids, basestring): + """Move MibModules from this organizer to moveTarget.""" + if isinstance(ids, six.string_types): ids = (ids,) target = self.getChildMoveTarget(moveTarget) for id in ids: @@ -192,12 +200,12 @@ def moveMibModules(self, moveTarget, ids=None, REQUEST=None): target.mibs._setObject(id, rec) if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Mib Module Moved', - 'Mib modules moved to %s.' % moveTarget + "Mib Module Moved", "Mib modules moved to %s." % moveTarget ) - REQUEST['RESPONSE'].redirect(target.getPrimaryUrlPath()) + REQUEST["RESPONSE"].redirect(target.getPrimaryUrlPath()) + + security.declareProtected(ZEN_MANAGE_DMD, "reIndex") - security.declareProtected(ZEN_MANAGE_DMD, 'reIndex') def reIndex(self): """Go through all devices in this tree and reindex them.""" zcat = self._getOb(self.default_catalog) @@ -207,7 +215,8 @@ def reIndex(self): for thing in mib.nodes() + mib.notifications(): thing.index_object() - security.declareProtected(ZEN_ADD, 'createCatalog') + security.declareProtected(ZEN_ADD, "createCatalog") + def createCatalog(self): """Create a catalog for mibs searching""" from Products.ZCatalog.ZCatalog import manage_addZCatalog @@ -216,12 +225,12 @@ def createCatalog(self): manage_addZCatalog(self, self.default_catalog, self.default_catalog) zcat = self._getOb(self.default_catalog) cat = zcat._catalog - cat.addIndex('oid', makeCaseInsensitiveKeywordIndex('oid')) - cat.addIndex('id', makeCaseInsensitiveKeywordIndex('id')) - cat.addIndex('summary', makeCaseInsensitiveKeywordIndex('summary')) - zcat.addColumn('getPrimaryId') - zcat.addColumn('id') - zcat.addColumn('oid') + cat.addIndex("oid", makeCaseInsensitiveKeywordIndex("oid")) + cat.addIndex("id", makeCaseInsensitiveKeywordIndex("id")) + cat.addIndex("summary", makeCaseInsensitiveKeywordIndex("summary")) + zcat.addColumn("getPrimaryId") + zcat.addColumn("id") + zcat.addColumn("oid") def handleUploadedFile(self, REQUEST): """ @@ -235,16 +244,20 @@ def handleUploadedFile(self, REQUEST): atomicWrite(savedMIBPath, mibs, raiseException=True, createDir=True) # create the job - mypath = self.absolute_url_path().replace('/zport/dmd/Mibs', '') + mypath = self.absolute_url_path().replace("/zport/dmd/Mibs", "") if not mypath: - mypath = '/' + mypath = "/" commandArgs = [ - binPath('zenmib'), 'run', savedMIBPath, - '--path=%s' % mypath, '--mibdepsdir=%s' % zenPath(_pathToMIB) + binPath("zenmib"), + "run", + savedMIBPath, + "--path=%s" % mypath, + "--mibdepsdir=%s" % zenPath(_pathToMIB), ] return self.dmd.JobManager.addJob( - SubprocessJob, description="Load MIB at %s" % mypath, - kwargs=dict(cmd=commandArgs) + SubprocessJob, + description="Load MIB at %s" % mypath, + kwargs={"cmd": commandArgs}, ) diff --git a/Products/ZenModel/OperatingSystem.py b/Products/ZenModel/OperatingSystem.py index 59d4dc540d..32303d9836 100644 --- a/Products/ZenModel/OperatingSystem.py +++ b/Products/ZenModel/OperatingSystem.py @@ -34,7 +34,7 @@ class OperatingSystem(Software): - 
totalSwap = 0L + totalSwap = 0 uname = "" _properties = Software._properties + ( diff --git a/Products/ZenModel/PerformanceConf.py b/Products/ZenModel/PerformanceConf.py index 15301717be..f6cd7e07f6 100644 --- a/Products/ZenModel/PerformanceConf.py +++ b/Products/ZenModel/PerformanceConf.py @@ -582,7 +582,7 @@ def _getZenModelerCommand( cmd = [zm] deviceName = self._escapeParentheses(deviceName) options = [ - 'run', '--now', '-d', '"{}"'.format(deviceName), '--monitor', performanceMonitor, + 'run', '--now', '-d', deviceName, '--monitor', performanceMonitor, '--collect={}'.format(collectPlugins) ] cmd.extend(options) diff --git a/Products/ZenModel/SiteError.py b/Products/ZenModel/SiteError.py index 91391be0b4..591bf7a949 100644 --- a/Products/ZenModel/SiteError.py +++ b/Products/ZenModel/SiteError.py @@ -38,7 +38,7 @@ def cleanUrl(cls, errorUrl): def createEmailHeader(cls, fromAddress, toAddress, subject): - ''' Create the smnp header for an error email + ''' Create the snmp header for an error email ''' header = 'To: %s\nFrom: %s\nSubject: %s\n' % ( toAddress, fromAddress, subject) diff --git a/Products/ZenModel/ThresholdInstance.py b/Products/ZenModel/ThresholdInstance.py index ee59fa1500..07e97fd32c 100644 --- a/Products/ZenModel/ThresholdInstance.py +++ b/Products/ZenModel/ThresholdInstance.py @@ -7,19 +7,16 @@ # ############################################################################## +import logging -import os +from twisted.spread import pb from Products.ZenModel.PerformanceConf import PerformanceConf from Products.ZenModel.MonitorClass import MonitorClass -from Products.ZenUtils.Utils import unused, rrd_daemon_args, rrd_daemon_retry - -from twisted.spread import pb - -import logging from Products.ZenUtils.deprecated import deprecated +from Products.ZenUtils.Utils import unused -log = logging.getLogger('zen.ThresholdInstance') +log = logging.getLogger("zen.ThresholdInstance") class ThresholdContext(pb.Copyable, pb.RemoteCopy): @@ -31,31 +28,35 @@ class ThresholdContext(pb.Copyable, pb.RemoteCopy): def __init__(self, context): self.metricMetaData = {} if isinstance(context, MonitorClass): - self.deviceName = "{context.id} hub".format(context=context) - self.componentName = '' - self.deviceUrl = 'zport/dmd/Monitors/Hub/{context.id}/viewHubPerformance'.format(context=context) - self.devicePath = 'Monitors/Hub/{context.id}'.format(context=context) - self._contextKey = '/'.join(('Daemons', context.id)) + self.deviceName = "{ctx.id} hub".format(ctx=context) + self.componentName = "" + self.deviceUrl = ( + "zport/dmd/Monitors/Hub/{ctx.id}/viewHubPerformance" + ).format(ctx=context) + self.devicePath = "Monitors/Hub/{ctx.id}".format(ctx=context) + self._contextKey = "/".join(("Daemons", context.id)) elif isinstance(context, PerformanceConf): - self.deviceName = "{context.id} collector".format(context=context) - self.componentName = '' - self.deviceUrl = 'zport/dmd/Monitors/Performance/{context.id}/viewDaemonPerformance'.format(context=context) - self.devicePath = 'Monitors/Performance/{context.id}'.format(context=context) - self._contextKey = '/'.join(('Daemons', context.id)) + self.deviceName = "{ctx.id} collector".format(ctx=context) + self.componentName = "" + self.deviceUrl = ( + "zport/dmd/Monitors/Performance/{ctx.id}/viewDaemonPerformance" + ).format(ctx=context) + self.devicePath = "Monitors/Performance/{ctx.id}".format( + ctx=context + ) + self._contextKey = "/".join(("Daemons", context.id)) else: self.deviceName = context.device().id self.componentName = context.id if 
self.componentName == self.deviceName: - self.componentName = '' + self.componentName = "" self._contextKey = context.getUUID() self.metricMetaData = context.getMetricMetadata() self._contextUid = context.getPrimaryId() - - def key(self): "Unique data that refers this context" return self.deviceName, self.componentName @@ -83,9 +84,9 @@ def fileKey(self, dataPoint): # return os.path.join(self.rrdPath, dataPoint) - pb.setUnjellyableForClass(ThresholdContext, ThresholdContext) + class ThresholdInstance(pb.Copyable, pb.RemoteCopy): """A ThresholdInstance is a threshold to be evaluated in a collector within a given context.""" @@ -106,7 +107,6 @@ def key(self): def dataPoints(self): "Returns the names of the datapoints used to compute the threshold" - def checkValue(self, dataPoint, timestamp, value): """ Check if the value violates the threshold. @@ -124,7 +124,6 @@ def check(self, dataPoints): returns events or an empty sequence""" raise NotImplementedError() - @deprecated def checkRaw(self, dataPoint, timeOf, value): """A new datapoint has been collected, use the given _raw_ @@ -133,8 +132,9 @@ def checkRaw(self, dataPoint, timeOf, value): """ raise NotImplementedError() - def getGraphElements(self, template, context, gopts, namespace, color, - legend, relatedGps): + def getGraphElements( + self, template, context, gopts, namespace, color, legend, relatedGps + ): """Produce a visual indication on the graph of where the threshold applies.""" unused(template, context, gopts, namespace, color, legend, relatedGps) @@ -143,16 +143,17 @@ def getGraphElements(self, template, context, gopts, namespace, color, pb.setUnjellyableForClass(ThresholdInstance, ThresholdInstance) + class RRDThresholdInstance(ThresholdInstance): """ Deprecated """ + pb.setUnjellyableForClass(RRDThresholdInstance, RRDThresholdInstance) class MetricThresholdInstance(ThresholdInstance): - def __init__(self, id, context, dpNames, eventClass, severity): self._context = context self.id = id @@ -172,7 +173,6 @@ def dataPoints(self): "Returns the names of the datapoints used to compute the threshold" return self.dataPointNames - def checkValue(self, dataPoint, timestamp, value): return self._checkImpl(dataPoint, value) @@ -187,4 +187,5 @@ def _checkImpl(self, dataPoint, value): """ raise NotImplementedError() + pb.setUnjellyableForClass(MetricThresholdInstance, MetricThresholdInstance) diff --git a/Products/ZenModel/ValueChangeThreshold.py b/Products/ZenModel/ValueChangeThreshold.py index 10e91a2436..75ee6b79fe 100644 --- a/Products/ZenModel/ValueChangeThreshold.py +++ b/Products/ZenModel/ValueChangeThreshold.py @@ -7,52 +7,56 @@ # ############################################################################## - -from Products.ZenModel.ThresholdInstance import MetricThresholdInstance - -__doc__= """Threshold to track when a value changes. 
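
# The threshold classes above are pb.Copyable/pb.RemoteCopy pairs registered
# with pb.setUnjellyableForClass so zenhub can ship them to collectors over
# Perspective Broker.  A minimal sketch of that registration pattern
# (ExampleConfig is hypothetical, not one of the threshold classes):
from twisted.spread import pb

class ExampleConfig(pb.Copyable, pb.RemoteCopy):
    def __init__(self, name="example"):
        self.name = name

# Registering the class for itself means the same definition serves as both
# the sending and receiving representation on the PB connection.
pb.setUnjellyableForClass(ExampleConfig, ExampleConfig)
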
-""" +import logging from AccessControl.class_init import InitializeClass -from ThresholdClass import ThresholdClass -from ThresholdInstance import ThresholdContext +from twisted.spread import pb from zenoss.protocols.protobufs.zep_pb2 import SEVERITY_INFO + from Products.ZenEvents.ZenEventClasses import Status_Perf from Products.ZenUtils import Map -import logging -log = logging.getLogger('zen.MinMaxCheck') +from .ThresholdClass import ThresholdClass +from .ThresholdInstance import MetricThresholdInstance, ThresholdContext +log = logging.getLogger("zen.MinMaxCheck") + +NaN = float("nan") -NaN = float('nan') class ValueChangeThreshold(ThresholdClass): """ - Threshold that can watch changes in a value + Threshold that can watch changes in a value. """ eventClass = Status_Perf severity = SEVERITY_INFO def createThresholdInstance(self, context): - """Return the config used by the collector to process change thresholds """ - mmt = ValueChangeThresholdInstance(self.id, - ThresholdContext(context), - self.dsnames, - eventClass=self.eventClass, - severity=self.severity) + Return the config used by the collector to process change thresholds. + """ + mmt = ValueChangeThresholdInstance( + self.id, + ThresholdContext(context), + self.dsnames, + eventClass=self.eventClass, + severity=self.severity, + ) return mmt + InitializeClass(ValueChangeThreshold) ValueChangeThresholdClass = ValueChangeThreshold + class ValueChangeThresholdInstance(MetricThresholdInstance): """ - Threshold that emits an event when a value changes from its previous value. Does not send clear events. + Threshold that emits an event when a value changes from its + previous value. Does not send clear events. """ - lastValues = Map.Locked(Map.Timed({}, 60*60*24)) # 24-hour timeout + lastValues = Map.Locked(Map.Timed({}, 60 * 60 * 24)) # 24-hour timeout def _checkImpl(self, dataPoint, value): dpKey = self._getDpKey(dataPoint) @@ -62,7 +66,7 @@ def _checkImpl(self, dataPoint, value): # Update the value in the map. ValueChangeThresholdInstance.lastValues[dpKey] = value # .. 
Only create a change event if this isn't the first collection - if lastValue != None: + if lastValue is not None: event = dict( device=self.context().deviceName, summary="Value changed from %s to %s" % (lastValue, value), @@ -71,16 +75,19 @@ def _checkImpl(self, dataPoint, value): component=self.context().componentName, current=value, previous=lastValue, - severity=self.severity) + severity=self.severity, + ) return (event,) return tuple() def _getDpKey(self, dp): - return ':'.join(self.context().key()) + ':' + dp + return ":".join(self.context().key()) + ":" + dp def getGraphValues(self, relatedGps, context): # currently, no visualization implemented for this threshold type return () -from twisted.spread import pb -pb.setUnjellyableForClass(ValueChangeThresholdInstance, ValueChangeThresholdInstance) + +pb.setUnjellyableForClass( + ValueChangeThresholdInstance, ValueChangeThresholdInstance +) diff --git a/Products/ZenModel/ZDeviceLoader.py b/Products/ZenModel/ZDeviceLoader.py index 176f0682cf..b5839c339b 100644 --- a/Products/ZenModel/ZDeviceLoader.py +++ b/Products/ZenModel/ZDeviceLoader.py @@ -29,6 +29,7 @@ from DateTime import DateTime from OFS.SimpleItem import SimpleItem +from Products.Jobber.zenjobs import app from Products.ZenUtils.Utils import isXmlRpc, setupLoggingHeader from Products.ZenUtils.Utils import clearWebLoggingStream from Products.ZenUtils.IpUtil import getHostByName, ipwrap @@ -182,7 +183,7 @@ class CreateDeviceJob(Job): """ Create a new device object. """ - + name = 'CreateDeviceJob' # Declare DeviceExistsError as an expected exception so that a traceback # is not written to zenjobs' log. throws = Job.throws + (DeviceExistsError,) @@ -275,6 +276,7 @@ def setCustomProperty(self, dev, cProperty, value): return dev.setZenProperty(cProperty, value) +app.register_task(CreateDeviceJob) # alias the DeviceCreationJob so zenpacks don't break DeviceCreationJob = CreateDeviceJob diff --git a/Products/ZenModel/ZenMenuItem.py b/Products/ZenModel/ZenMenuItem.py index 7c827bdcd8..7240aaa0d1 100644 --- a/Products/ZenModel/ZenMenuItem.py +++ b/Products/ZenModel/ZenMenuItem.py @@ -56,12 +56,26 @@ def getMenuItemOwner(self): parent = aq_parent(parent) return parent - def __cmp__(self, other): - if isinstance(other, ZenMenuItem): - if other and other.ordering: - return cmp(other.ordering, self.ordering) - else: - return cmp(0.0, self.ordering) - return cmp(id(self), id(other)) - + def __eq__(self, other): + if not isinstance(other, ZenMenuItem): + return False + if self is other: + return True + return self.ordering == other.ordering + + def __lt__(self, other): + if not isinstance(other, ZenMenuItem): + return NotImplemented + if self is other: + return False + return self.ordering < other.ordering + + def __le__(self, other): + if not isinstance(other, ZenMenuItem): + return NotImplemented + if self is other: + return True + return self.ordering <= other.ordering + + InitializeClass(ZenMenuItem) diff --git a/Products/ZenModel/ZenPack.py b/Products/ZenModel/ZenPack.py index b305c97437..a040a03114 100644 --- a/Products/ZenModel/ZenPack.py +++ b/Products/ZenModel/ZenPack.py @@ -77,7 +77,7 @@ def __init__(self, *args, **kw): VersionBase.__init__(self, 'Zenoss', *args, **kw) -def needDir(path, perms=0750): +def needDir(path, perms=0o750): if not os.path.isdir(path): os.mkdir(path, perms) return path @@ -343,7 +343,7 @@ def storeBackup(self): """ backupDir = zenPath(".ZenPacks") if not os.path.isdir(backupDir): - os.makedirs(backupDir, 0750) + os.makedirs(backupDir, 0o750) src = 
self.eggPath() filename = "" @@ -493,7 +493,7 @@ def migrate(self, previousVersion=None): try: for instance in instances: - if instance.version >= migrateCutoff: + if instance.version.tuple() >= migrateCutoff: recover.append(instance) instance.migrate(self) except Exception as ex: diff --git a/Products/ZenModel/ZenPackLoader.py b/Products/ZenModel/ZenPackLoader.py index 06a89d4c3b..0cfb21612a 100644 --- a/Products/ZenModel/ZenPackLoader.py +++ b/Products/ZenModel/ZenPackLoader.py @@ -240,7 +240,7 @@ def _updateConfFile(self, pack): def load(self, pack, unused): for fs in findFiles(pack, 'daemons', filter=self.filter): - os.chmod(fs, 0755) + os.chmod(fs, 0o755) path = self.binPath(fs) if os.path.lexists(path): os.remove(path) @@ -286,7 +286,7 @@ def binPath(self, bin_file): def load(self, pack, unused): for fs in findFiles(pack, 'bin', filter=self.filter): - os.chmod(fs, 0755) + os.chmod(fs, 0o755) path = self.binPath(fs) if os.path.lexists(path): os.remove(path) @@ -314,7 +314,7 @@ def filter(self, f): def load(self, pack, unused): for fs in findFiles(pack, 'libexec', filter=self.filter): - os.chmod(fs, 0755) + os.chmod(fs, 0o755) def upgrade(self, pack, app): self.unload(pack, app) diff --git a/Products/ZenModel/ZenPacker.py b/Products/ZenModel/ZenPacker.py index 11f08e739e..ba906b2e43 100644 --- a/Products/ZenModel/ZenPacker.py +++ b/Products/ZenModel/ZenPacker.py @@ -60,6 +60,8 @@ def findObject(self, id): pass if len(result) == 0: try: + if isinstance(id, unicode): + id = id.encode('utf-8') result.append(self.dmd.unrestrictedTraverse(id)) except KeyError: pass diff --git a/Products/ZenModel/ZenossInfo.py b/Products/ZenModel/ZenossInfo.py index d586bb17f6..a709ab7c8e 100644 --- a/Products/ZenModel/ZenossInfo.py +++ b/Products/ZenModel/ZenossInfo.py @@ -20,7 +20,6 @@ import shutil import traceback import logging -import commands from AccessControl.class_init import InitializeClass from OFS.SimpleItem import SimpleItem @@ -303,19 +302,6 @@ def getRabbitMQVersion(self): from Products.ZenUtils.qverify import ZenAmqp return Version.parse("RabbitMQ %s" % ZenAmqp().getVersion()) - @versionmeta("Erlang", "http://www.erlang.org/") - def getErlangVersion(self): - retVal, output = commands.getstatusoutput('erl -noshell +V') - version = None - - if not retVal: - try: - version = re.findall(r'version (\S+)', output)[0] - except Exception: - pass - - return Version.parse("Erlang %s" % version) - def getAllVersions(self): """ Return a list of version numbers for currently tracked component @@ -329,7 +315,6 @@ def getAllVersions(self): self.getMySQLVersion, self.getTwistedVersion, self.getRabbitMQVersion, - self.getErlangVersion, self.getNetSnmpVersion, self.getPyNetSnmpVersion, self.getWmiVersion, diff --git a/Products/ZenModel/data/devices.xml b/Products/ZenModel/data/devices.xml index 8a8e1b691d..53d6dcc313 100644 --- a/Products/ZenModel/data/devices.xml +++ b/Products/ZenModel/data/devices.xml @@ -1,18 +1,30 @@ - + [] + +7200 + + +7200 + + +43200 + + +0 + @@ -47,9 +59,9 @@ v2c - + - + @@ -196,6 +208,9 @@ False [] + +['pack'] + [] diff --git a/Products/ZenModel/data/events.xml b/Products/ZenModel/data/events.xml index 761da52eae..4e4c1cbfce 100644 --- a/Products/ZenModel/data/events.xml +++ b/Products/ZenModel/data/events.xml @@ -1,14 +1,14 @@ - + True diff --git a/Products/ZenModel/data/manufacturers.xml b/Products/ZenModel/data/manufacturers.xml index cc3c25032e..e7b5345892 100644 --- a/Products/ZenModel/data/manufacturers.xml +++ b/Products/ZenModel/data/manufacturers.xml @@ -1,14 +1,14 @@ - + 
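
# The 0750/0755 literals above become 0o750/0o755; the numeric values are
# unchanged, and the 0o prefix is accepted by Python 2.6+ as well as
# Python 3, which drops the bare leading-zero form.  A small POSIX-only
# check of the equivalence, independent of the Zenoss code:
import os
import stat
import tempfile

assert 0o750 == int("750", 8) == 488

path = tempfile.mkdtemp()                  # created with mode 0o700
os.chmod(path, 0o750)
assert stat.S_IMODE(os.stat(path).st_mode) == 0o750
os.rmdir(path)
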
diff --git a/Products/ZenModel/data/monitorTemplate.xml b/Products/ZenModel/data/monitorTemplate.xml index cc6374d061..eb412e8b09 100644 --- a/Products/ZenModel/data/monitorTemplate.xml +++ b/Products/ZenModel/data/monitorTemplate.xml @@ -1,14 +1,14 @@ - + MonitorClass diff --git a/Products/ZenModel/data/osprocesses.xml b/Products/ZenModel/data/osprocesses.xml index 49bb78a488..f416f49b83 100644 --- a/Products/ZenModel/data/osprocesses.xml +++ b/Products/ZenModel/data/osprocesses.xml @@ -1,14 +1,14 @@ - + Base Zenoss daemons diff --git a/Products/ZenModel/data/services.xml b/Products/ZenModel/data/services.xml index db08689f08..53c558fc7b 100644 --- a/Products/ZenModel/data/services.xml +++ b/Products/ZenModel/data/services.xml @@ -1,14 +1,14 @@ - + False diff --git a/Products/ZenModel/data/zodb.sql.gz b/Products/ZenModel/data/zodb.sql.gz index eb6cc37714..a288bcaa71 100644 Binary files a/Products/ZenModel/data/zodb.sql.gz and b/Products/ZenModel/data/zodb.sql.gz differ diff --git a/Products/ZenModel/migrate/Migrate.py b/Products/ZenModel/migrate/Migrate.py index 153e34d57c..ac32eae76a 100644 --- a/Products/ZenModel/migrate/Migrate.py +++ b/Products/ZenModel/migrate/Migrate.py @@ -7,19 +7,23 @@ # ############################################################################## - -__doc__='''Migrate +'''Migrate A small framework for data migration. ''' +from __future__ import print_function + +import re + import transaction from Products.ZenUtils.ZenScriptBase import ZenScriptBase from Products.ZenUtils.Version import Version as VersionBase from Products.ZenReports.ReportLoader import ReportLoader from Products.ZenUtils.Utils import zenPath from Products.ZenModel.ZVersion import VERSION +from Products.ZenUtils.terminal_size import get_terminal_size import sys from textwrap import wrap @@ -60,17 +64,42 @@ def __init__(self): "self insert ourselves in the list of all steps" allSteps.append(self) - def __cmp__(self, other): - result = cmp(self.version, other.version) - if result: - return result - # if we're in the other dependency list, we are "less" - if self in other.getDependencies(): - return -1 - # if other is in the out dependency list, we are "greater" - if other in self.getDependencies(): - return 1 - return 0 + def __eq__(self, other): + if not isinstance(other, Step): + return False + if self is other: + return True + return ( + self.version == other.version + and self.dependencies == other.dependencies + ) + + def __lt__(self, other): + if not isinstance(other, Step): + return NotImplemented + if self is other: + return False + if self.version > other.version: + return False + return self._equivalency(other) + + def __le__(self, other): + if not isinstance(other, Step): + return NotImplemented + if self is other: + return True + return self._equivalency(other) + + def _equivalency(self, other): + if self.version > other.version: + return False + if self.version == other.version: + if self in other.getDependencies(): + return True + if other in self.getDependencies(): + return False + return self.name() < other.name() + return True def getDependencies(self): if not self.dependencies: @@ -121,11 +150,7 @@ def __init__(self, noopts=0): ZenScriptBase.__init__(self, noopts=noopts, connect=False) self.connect() self.allSteps = allSteps[:] - # 2 phase sorting - # 1. sort by name - self.allSteps.sort(lambda x,y: cmp(x.name(), y.name())) - # 2. 
sort by dependencies - self.allSteps.sort() + self.allSteps.sort() # _must_ sort the dependencies # Log output to a file # self.setupLogging() does *NOT* do what we want. @@ -426,34 +451,39 @@ def orderedSteps(self): def list(self, inputSteps=None, execSteps=None): steps = inputSteps or self.allSteps nameWidth = max(list(len(x.name()) for x in steps)) - + maxwidth = min(get_terminal_size().columns, 200) + indentSize = 8 + 3 + nameWidth + def switch(inp): switcher = { 1: ((" Ver Name" + " "*(nameWidth-3) + "Status\n" "--------+" + "-"*nameWidth +"+-------"), "%-8s %-{}s %-8s".format(nameWidth+1)), - 0: ((" Ver Name" + " "*(nameWidth-3) + "Description\n" - "--------+" + "-"*nameWidth +"+-----------" + "-"*30), + 0: ((" Ver Name" + " "*(nameWidth-2) + "Description\n" + "--------+" + "-"*(nameWidth+1) +"+-----------" + + "-"*(maxwidth - indentSize - 3)), "%-8s %-{}s %s".format(nameWidth+1)) } return switcher.get(inp) header, outputTemplate = switch(1 if inputSteps else 0) - print header + print(header) def printState(tpl, version, name, doc=None, status=None): if status: - print tpl%(version, name, status) + print(tpl%(version, name, status)) else: - print tpl%(version, name, doc) + print(tpl%(version, name, doc)) + indent = ' ' * indentSize + docWidth = maxwidth for s in steps: doc = s.__doc__ if not doc: doc = sys.modules[s.__class__.__module__].__doc__ \ or 'Not Documented' - doc.strip() - indent = ' '*22 - doc = '\n'.join(wrap(doc, width=80, + doc.strip() + doc = re.sub("\s+", " ", doc) + doc = '\n'.join(wrap(doc, width=docWidth, initial_indent=indent, subsequent_indent=indent)) doc = doc.lstrip() diff --git a/Products/ZenModel/migrate/addConfigCacheProperties.py b/Products/ZenModel/migrate/addConfigCacheProperties.py new file mode 100644 index 0000000000..e0139f9e47 --- /dev/null +++ b/Products/ZenModel/migrate/addConfigCacheProperties.py @@ -0,0 +1,93 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import absolute_import + +from Products.ZenCollector.configcache.constants import Constants +from Products.ZenRelations.zPropertyCategory import setzPropertyCategory + +from . import Migrate + + +_properties = ( + ( + ( + Constants.device_time_to_live_id, + Constants.device_time_to_live_value, + ), + { + "type": "int", + "label": "Device configuration expiration", + "description": ( + "The maximum number of seconds to wait before rebuilding a " + "device configuration." + ), + }, + ), + ( + ( + Constants.device_minimum_time_to_live_id, + Constants.device_minimum_time_to_live_value, + ), + { + "type": "int", + "label": "Device configuration pre-expiration window", + "description": ( + "The number of seconds the configuration is protected " + "from being rebuilt." + ), + }, + ), + ( + ( + Constants.device_build_timeout_id, + Constants.device_build_timeout_value, + ), + { + "type": "int", + "label": "Device configuration build timeout", + "description": ( + "The number of seconds allowed for building a device " + "configuration." 
+ ), + }, + ), + ( + ( + Constants.device_pending_timeout_id, + Constants.device_pending_timeout_value, + ), + { + "type": "int", + "label": "Device configuration build queued timeout", + "description": ( + "The number of seconds a device configuration build may be " + "queued before a timeout." + ), + }, + ), +) + + +class addConfigCacheProperties(Migrate.Step): + """ + Add the zDeviceConfigTTL, zDeviceConfigBuildTimeout, and + zDeviceConfigPendingTimeout z-properties to /Devices. + """ + + version = Migrate.Version(200, 7, 0) + + def cutover(self, dmd): + for args, kwargs in _properties: + if not dmd.Devices.hasProperty(args[0]): + dmd.Devices._setProperty(*args, **kwargs) + setzPropertyCategory(args[0], "Config Cache") + + +addConfigCacheProperties() diff --git a/Products/ZenModel/migrate/addzNoRelationshipCopy.py b/Products/ZenModel/migrate/addzNoRelationshipCopy.py new file mode 100644 index 0000000000..9f81c605e0 --- /dev/null +++ b/Products/ZenModel/migrate/addzNoRelationshipCopy.py @@ -0,0 +1,28 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +__doc__ = """ +add zNoRelationshipCopy property for devices +""" + +import logging +import Migrate + +log = logging.getLogger("zen.migrate") + + +class addzNoRelationshipCopy(Migrate.Step): + version = Migrate.Version(200, 7, 0) + + def cutover(self, dmd): + if not hasattr(dmd.Devices, "zNoRelationshipCopy"): + dmd.Devices._setProperty("zNoRelationshipCopy", ["pack"], type="lines") + + +addzNoRelationshipCopy() diff --git a/Products/ZenModel/migrate/import_export_filesystem.py b/Products/ZenModel/migrate/import_export_filesystem.py index f472ae45ab..fee4adbee9 100644 --- a/Products/ZenModel/migrate/import_export_filesystem.py +++ b/Products/ZenModel/migrate/import_export_filesystem.py @@ -30,6 +30,6 @@ def cutover(self, unused): for directory in ['import', 'export']: path = zenPath(directory) if not os.path.exists(path): - os.mkdir(path, 0750) + os.mkdir(path, 0o750) ImportExportFilesystem() diff --git a/Products/ZenModel/migrate/zensyslogSvcDefForMsgFiltering.py b/Products/ZenModel/migrate/zensyslogSvcDefForMsgFiltering.py new file mode 100644 index 0000000000..50bdd84607 --- /dev/null +++ b/Products/ZenModel/migrate/zensyslogSvcDefForMsgFiltering.py @@ -0,0 +1,117 @@ +############################################################################### +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function + +__doc__ = """ +Update zensyslog graphs config to add eventFilterDroppedCount number. 
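
# addConfigCacheProperties and addzNoRelationshipCopy above share one shape:
# a Migrate.Step whose cutover() checks for the property before creating it,
# so re-running the step is harmless.  A minimal sketch of that shape, with
# a hypothetical zExample property rather than one added by this change set:
import Migrate


class addzExampleProperty(Migrate.Step):
    """Add a hypothetical zExample lines property to /Devices."""

    version = Migrate.Version(200, 7, 0)

    def cutover(self, dmd):
        # Only create the property on the first run.
        if not dmd.Devices.hasProperty("zExample"):
            dmd.Devices._setProperty("zExample", [], type="lines")


addzExampleProperty()  # instantiation registers the step with the framework
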
+""" + +import logging + +import Migrate +import servicemigration as sm + +from servicemigration.metrics import Metric +from servicemigration.graphdatapoint import GraphDatapoint + + + +log = logging.getLogger("zen.migrate") +svcNamesToUpdate = ['zensyslog'] +sm.require("1.0.0") + + +class ZensyslogSvcDevForMsgParsing(Migrate.Step): + ''' + add 'Filter Dropped Events' to zensyslog 'Events' graph + ''' + + version = Migrate.Version(200, 7, 0) + + def cutover(self, dmd): + try: + ctx = sm.ServiceContext() + except sm.ServiceMigrationError: + log.error("Couldn't generate service context, skipping.") + return + + commit = False + + svcs = filter(lambda s: s.name in svcNamesToUpdate, ctx.services) + log.info("Found %i %r services to update.", len(svcs), svcNamesToUpdate) + for svc in svcs: + # Update the existing 'Events' graph with new graphpoint + gc = next( + (x for x in svc.monitoringProfile.graphConfigs if x.name == 'Events'), + None + ) + if gc is None: + log.error( + "%s service: No 'Events' graph configuration found; " + "broken service def; skipping.", svc.name) + continue + if not filter(lambda x: x.pointID == "eventFilterDroppedCount", gc.datapoints): + log.info( + "%s service: Adding 'Filter Dropped Events' graphpoint to the" + " 'Events' graph.", svc.name) + gc.datapoints.append( + GraphDatapoint( + aggregator='avg', + fill=False, + pointID='eventFilterDroppedCount', + legend='eventFilterDroppedCount', + metric='eventFilterDroppedCount', + metricSource='zensyslog', + name='Filter Dropped Events', + rate=False, + pointType='line' + ) + ) + commit = True + else: + log.info( + "%s service: 'Filter Dropped Events' graphpoint exists" + " on the 'Events' graph; skipping.", svc.name) + # Add new Service metric + mc = next( + (x for x in svc.monitoringProfile.metricConfigs + if x.name == 'zensyslog internal metrics'), + None + ) + if mc is None: + log.error( + "%s service: No 'zensyslog internal metrics' metric " + "config found; broken service def; skipping.", svc.name) + continue + else: + if not filter(lambda x: x.ID == "eventFilterDroppedCount", mc.metrics): + log.info( + "%s service: Adding 'Filter Dropped Events' " + "metric", svc.name) + mc.metrics.append( + Metric( + ID='eventFilterDroppedCount', + name='Filter Dropped Events', + unit='Events', + description='Total number of Filter-Dropped events.', + counter=False + ) + ) + commit = True + else: + log.info( + "%s service: 'Filter Dropped Events' metric " + "exists; skipping.", svc.name) + + if commit: + ctx.commit() + + +ZensyslogSvcDevForMsgParsing() diff --git a/Products/ZenModel/migrate/zensyslogSvcDefForMsgParsing.py b/Products/ZenModel/migrate/zensyslogSvcDefForMsgParsing.py new file mode 100644 index 0000000000..367a0d76b4 --- /dev/null +++ b/Products/ZenModel/migrate/zensyslogSvcDefForMsgParsing.py @@ -0,0 +1,118 @@ +############################################################################### +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function + +__doc__ = """ +Update zensyslog graphs config to add eventParserDroppedCount number. 
+""" + +import logging + +import Migrate +import servicemigration as sm + +from servicemigration.metrics import Metric +from servicemigration.graphdatapoint import GraphDatapoint + + + +log = logging.getLogger("zen.migrate") +svcNamesToUpdate = ['zensyslog'] +sm.require("1.0.0") + + +class ZensyslogSvcDevForMsgParsing(Migrate.Step): + ''' + add 'Parser Dropped Events' to zensyslog 'Events' graph + ''' + + version = Migrate.Version(200, 7, 0) + + def cutover(self, dmd): + try: + ctx = sm.ServiceContext() + except sm.ServiceMigrationError: + log.error("Couldn't generate service context, skipping.") + return + + commit = False + + svcs = filter(lambda s: s.name in svcNamesToUpdate, ctx.services) + log.info("Found %i %r services to update.", len(svcs), svcNamesToUpdate) + for svc in svcs: + collectorName = ctx.getServiceParent(svc).name + # Update the existing 'Events' graph with new graphpoint + gc = next( + (x for x in svc.monitoringProfile.graphConfigs if x.name == 'Events'), + None + ) + if gc is None: + log.error( + "%s %s service: No 'Events' graph configuration found; " + "broken service def; skipping.", collectorName, svc.name) + continue + if not filter(lambda x: x.pointID == "eventParserDroppedCount", gc.datapoints): + log.info( + "%s %s service: Adding 'Parser Dropped Events' graphpoint to the" + " 'Events' graph.", collectorName, svc.name) + gc.datapoints.append( + GraphDatapoint( + aggregator='avg', + fill=False, + pointID='eventParserDroppedCount', + legend='eventParserDroppedCount', + metric='eventParserDroppedCount', + metricSource='zensyslog', + name='Parser Dropped Events', + rate=False, + pointType='line' + ) + ) + commit = True + else: + log.info( + "%s %s service: 'Parser Dropped Events' graphpoint exists" + " on the 'Events' graph; skipping.", collectorName, svc.name) + # Add new Service metric + mc = next( + (x for x in svc.monitoringProfile.metricConfigs + if x.name == 'zensyslog internal metrics'), + None + ) + if mc is None: + log.error( + "%s %s service: No 'zensyslog internal metrics' metric " + "config found; broken service def; skipping.", collectorName, svc.name) + continue + else: + if not filter(lambda x: x.ID == "eventParserDroppedCount", mc.metrics): + log.info( + "%s %s service: Adding 'Parser Dropped Events' " + "metric", collectorName, svc.name) + mc.metrics.append( + Metric( + ID='eventParserDroppedCount', + name='Parser Dropped Events', + unit='Events', + description='Total number of Parser-Dropped events.', + counter=False + ) + ) + commit = True + else: + log.info( + "%s %s service: 'Parser Dropped Events' metric " + "exists; skipping.", collectorName, svc.name) + + if commit: + ctx.commit() + + +ZensyslogSvcDevForMsgParsing() diff --git a/Products/ZenModel/migrate/zentrapSvcDefForFiltering.py b/Products/ZenModel/migrate/zentrapSvcDefForFiltering.py new file mode 100644 index 0000000000..0b1f04dc72 --- /dev/null +++ b/Products/ZenModel/migrate/zentrapSvcDefForFiltering.py @@ -0,0 +1,235 @@ +############################################################################### +# +# Copyright (C) Zenoss, Inc. 2023, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +from __future__ import print_function + +__doc__ = """ +Update zentrap graphs config to add eventFilterDroppedCount number. 
+""" + +import logging +import re + +import Migrate +import servicemigration as sm + +from servicemigration.metrics import Metric +from servicemigration.graphrange import GraphRange +from servicemigration.graphconfig import GraphConfig +from servicemigration.graphdatapoint import GraphDatapoint + + + +log = logging.getLogger("zen.migrate") +svcNamesToUpdate = ['zentrap'] +sm.require("1.0.0") + + +class ZentrapSvcDevForMsgParsing(Migrate.Step): + ''' + add 'Filter Dropped Events' to zentrap 'Events' graph + ''' + + version = Migrate.Version(200, 7, 0) + + def cutover(self, dmd): + try: + ctx = sm.ServiceContext() + except sm.ServiceMigrationError: + log.error("Couldn't generate service context, skipping.") + return + + commit = False + + svcs = filter(lambda s: s.name in svcNamesToUpdate, ctx.services) + log.info("Found %i %r services to update.", len(svcs), svcNamesToUpdate) + for svc in svcs: + # Update new global config with any previous, file defined filters + fc = next( + (x for x in svc.configFiles if x.name == '/opt/zenoss/etc/zentrap.filter.conf'), + None + ) + collectorName = ctx.getServiceParent(svc).name + if fc is None: + log.error( + "%s %s service: No 'zentrap.filter.conf' File configuration found; " + "broken service def; skipping.", collectorName, svc.name) + continue + + ofc = next( + (x for x in svc.originalConfigs if x.name == '/opt/zenoss/etc/zentrap.filter.conf'), + None + ) + if fc.content == ofc.content: + log.info( + "%s %s service: 'zentrap.filter.conf' contents are the default" + "; skipping.", collectorName, svc.name) + else: + log.info( + "%s %s service: found collector specific trap filter " + "configurations in 'zentrap.filter.conf'", + collectorName, svc.name) + collectorCfg = [] + for lineNumber, line in enumerate(fc.content.split('\n')): + if line.startswith('#'): + continue + # skip blank lines + if not line.strip(): + continue + if not re.search( + '^({} )*{}$'.format( + collectorName, + line.replace('*', '\*')), + dmd.ZenEventManager.trapFilters, + re.MULTILINE): + log.info( + '%s %s service: migrating to global config. "%s %s"', + collectorName, svc.name, + collectorName, line) + collectorCfg.append("{} {}".format( + collectorName, + line)) + if collectorCfg: + collectorCfg.insert( + 0, + '# Migrated from {} "zentrap.filter.conf" file.'.format( + collectorName + ) + ) + collectorCfg.insert(0, '') + collectorCfg.append('') + dmd.ZenEventManager.trapFilters += '\n'.join(collectorCfg) + else: + log.info( + '%s %s service: No configs migrated, must be already defined/migrated.', + collectorName, svc.name) + + # 'Events' graph .... + gc = next( + (x for x in svc.monitoringProfile.graphConfigs if x.name == 'Events'), + None + ) + if gc is None: + # 'Events' graph is missing ?!? 
+ log.info( + "%s %s service: No 'Events' graph configuration found; " + "creating it.", collectorName, svc.name) + gcs = svc.monitoringProfile.graphConfigs + gcs.insert( + 2, + GraphConfig( + graphID="events", + name="Events", + footer=False, + returnset='EXACT', + graphType='line', + yAxisLabel='Events', + description='Events', + graphRange=GraphRange( + start='1h-ago', + end='0s-ago' + ), + units="Events", + datapoints=[ + GraphDatapoint( + aggregator="avg", + fill=False, + pointID="events", + legend="Queued", + metric="events", + metricSource="zentrap", + name="Events", + rate=True, + pointType="line" + ), + GraphDatapoint( + aggregator="avg", + fill=False, + pointID="events", + legend="Events", + metric="eventCount", + metricSource="zentrap", + name="Event Count", + rate=False, + pointType="line" + ), + GraphDatapoint( + aggregator='avg', + fill=False, + pointID='eventFilterDroppedCount', + legend='eventFilterDroppedCount', + metric='eventFilterDroppedCount', + metricSource='zentrap', + name='Filter Dropped Events', + rate=False, + pointType='line' + ) + ] + ) + ) + else: + # Update the existing 'Events' graph with new graphpoint + if not filter(lambda x: x.pointID == "eventFilterDroppedCount", gc.datapoints): + log.info( + "%s %s service: Adding 'Filter Dropped Events' graphpoint to the" + " 'Events' graph.", collectorName, svc.name) + gc.datapoints.append( + GraphDatapoint( + aggregator='avg', + fill=False, + pointID='eventFilterDroppedCount', + legend='eventFilterDroppedCount', + metric='eventFilterDroppedCount', + metricSource='zentrap', + name='Filter Dropped Events', + rate=False, + pointType='line' + ) + ) + commit = True + else: + log.info( + "%s %s service: 'Filter Dropped Events' graphpoint exists" + " on the 'Events' graph; skipping.", collectorName, svc.name) + # Add new Service metric + mc = next( + (x for x in svc.monitoringProfile.metricConfigs + if x.name == 'zentrap internal metrics'), + None + ) + if mc is None: + log.error( + "%s %s service: No 'zentrap internal metrics' metric " + "config found; broken service def; skipping.", collectorName, svc.name) + continue + else: + if not filter(lambda x: x.ID == "eventFilterDroppedCount", mc.metrics): + log.info( + "%s %s service: Adding 'Filter Dropped Events' " + "metric", collectorName, svc.name) + mc.metrics.append( + Metric( + ID='eventFilterDroppedCount', + name='Filter Dropped Events', + unit='Events', + description='Total number of Filter-Dropped events.', + counter=False + ) + ) + commit = True + else: + log.info( + "%s %s service: 'Filter Dropped Events' metric " + "exists; skipping.", collectorName, svc.name) + + if commit: + ctx.commit() + + +ZentrapSvcDevForMsgParsing() diff --git a/Products/ZenModel/permissions.zcml b/Products/ZenModel/permissions.zcml index baec310405..9262827552 100644 --- a/Products/ZenModel/permissions.zcml +++ b/Products/ZenModel/permissions.zcml @@ -1,5 +1,5 @@ - + + @@ -188,4 +188,5 @@ id="zenoss.ManageTrigger" title="Manage Trigger" /> + diff --git a/Products/ZenModel/tests/IpUtilTest.py b/Products/ZenModel/tests/IpUtilTest.py index caaa8b0312..3d46d14d11 100644 --- a/Products/ZenModel/tests/IpUtilTest.py +++ b/Products/ZenModel/tests/IpUtilTest.py @@ -137,14 +137,14 @@ def testNumbIpGood(self): self.assertEqual( IpUtil.numbip( '192.168.2.3'), - 3232236035L) + 3232236035) def testStripGood(self): '''check that the strip function can convert a number back into an IP''' self.assertEqual( IpUtil.strip( - 3232236035L), + 3232236035), '192.168.2.3') def testGetNetBadIp(self): @@ -173,7 
+173,7 @@ def testGetNetAllGood(self): IpUtil.getnet( '192.168.2.3', '255.255.240.0'), - 3232235520L) + 3232235520) def testGetNetStr(self): '''check to make sure getnetstr works fine, diff --git a/Products/ZenModel/tests/testIpAddress.py b/Products/ZenModel/tests/testIpAddress.py index 331fb96c7e..5e033798d3 100644 --- a/Products/ZenModel/tests/testIpAddress.py +++ b/Products/ZenModel/tests/testIpAddress.py @@ -28,6 +28,8 @@ class TestIpAddress(ZenModelBaseTest): def afterSetUp(self): super(TestIpAddress, self).afterSetUp() self.dev = self.dmd.Devices.createInstance("testdev") + self.dev.setManageIp('2.3.4.5') + self.maddr = self.dev.ipaddress().primaryAq() tmpIface = IpInterface('test') self.dev.os.interfaces._setObject('test',tmpIface) self.iface = self.dev.getDeviceComponents()[0] @@ -47,6 +49,15 @@ def testSetNetmask(self): self.addr.setNetmask(8) self.assert_(self.addr.getIpAddress() == '1.2.3.4/8') + + def testDeviceDelete(self): + maddrPath = self.maddr.getPrimaryId() + addrPath = self.addr.getPrimaryId() + self.dev.deleteDevice() + self.assert_(self.maddr.manageDevice() is None) + self.assert_(self.addr.interface() is None) + + # def testSetIpAddress(self): # self.addr.setIpAddress('2.3.4.5/16') # self.assert_(self.addr.getIpAddress() == '2.3.4.5/16') diff --git a/Products/ZenModel/tests/testMaintenanceWindow.py b/Products/ZenModel/tests/testMaintenanceWindow.py index d0cf77a85b..40b0f05225 100644 --- a/Products/ZenModel/tests/testMaintenanceWindow.py +++ b/Products/ZenModel/tests/testMaintenanceWindow.py @@ -339,7 +339,7 @@ class multiWindow: multiWin.dev.setGroups(multiWin.grp.id) multiWin.startDateTime = '1138531500' - startDate_time = [ 2006, 1, 31, 10, 00, 12, 0, 0, 0 ] + startDate_time = [ 2006, 1, 31, 10, 0, 12, 0, 0, 0 ] multiWin.tn = range(1,maxWindows) multiWin.time_tn = [] multiWin.mwIds = [] diff --git a/Products/ZenModel/tests/testMigrate.py b/Products/ZenModel/tests/testMigrate.py index 43a8f7ffa9..74acb593b9 100644 --- a/Products/ZenModel/tests/testMigrate.py +++ b/Products/ZenModel/tests/testMigrate.py @@ -13,14 +13,17 @@ from Products.ZenModel.migrate.Migrate import Migration, Version, Step class MyTestStep(Step): - def __init__(self, major, minor, micro): + def __init__(self, major, minor, micro, name=None): self.version = Version(major, minor, micro) + self._name = name or "MyTestStep" def __cutover__(self): pass def __cleanup__(self): pass def name(self): - return 'MyTestStep_%s' % self.version.short() + return '%s_%s' % (self._name, self.version.short()) + def __repr__(self): + return self.name() step300 = MyTestStep(3, 0, 0) step30_70 = MyTestStep(3, 0, 70) @@ -153,6 +156,24 @@ def testDetermineSteps(self): m.options.steps = ['MyTestStep_1.1.0'] self.assertEquals(m.determineSteps(), m.allSteps[1:2]) + def testDependencies(t): + m = Migration(noopts=True) + s1 = MyTestStep(1, 0, 0, name="StepA") + s2 = MyTestStep(1, 0, 0, name="StepB") + s3 = MyTestStep(1, 1, 0, name="StepC") + s4 = MyTestStep(1, 1, 0, name="StepD") + s5 = MyTestStep(1, 2, 0, name="StepE") + s6 = MyTestStep(1, 2, 0, name="StepCe") + s5.dependencies = [s3] + s3.dependencies = [s2, s4] + s1.dependencies = [s2] + m.allSteps = [s1, s2, s3, s4, s5, s6] + m.allSteps.sort() + m.options.level = "1.0.0" + t.assertEquals(m.determineSteps(), [s2, s1, s4, s3, s6, s5]) + m.options.level = "1.1.0" + t.assertEquals(m.determineSteps(), [s4, s3, s6, s5]) + def test_suite(): from unittest import TestSuite, makeSuite diff --git a/Products/ZenModel/tests/testRRDTemplates.py 
b/Products/ZenModel/tests/testRRDTemplates.py index dad53de469..582a7542fb 100644 --- a/Products/ZenModel/tests/testRRDTemplates.py +++ b/Products/ZenModel/tests/testRRDTemplates.py @@ -68,6 +68,41 @@ def testTemplateRetrieval(self): self.assert_('test2' not in lintemps) self.assert_('test3' in lintemps) + def testDeviceTemplateSelection(self): + # test correct selection of templates for a device + + devices = self.dmd.Devices + server = self.dmd.Devices.createOrganizer('/Server') + linux = self.dmd.Devices.createOrganizer('/Server/Linux') + + devices.manage_addRRDTemplate('Device') + server.manage_addRRDTemplate('Device-addition') + server.manage_addRRDTemplate('Device-replacement') + linux.manage_addRRDTemplate('Device') + # the next two should be ignored because the base template does not exist + linux.manage_addRRDTemplate('nothere-additional') + linux.manage_addRRDTemplate('nothere-replacement') + + devdev = devices.createInstance('devdev') + devdev.setZenProperty('zDeviceTemplates', ['Device']) + serdev = devices.createInstance('serdev') + serdev.setZenProperty('zDeviceTemplates', ['Device']) + lindev = devices.createInstance('lindev') + lindev.setZenProperty('zDeviceTemplates', ['Device']) + + getid = lambda x:'/'.join((x.getRRDPath(), x.id)) + devtemps = map(getid, devdev.getRRDTemplates()) + sertemps = map(getid, serdev.getRRDTemplates()) + lintemps = map(getid, lindev.getRRDTemplates()) + + devtmpls = ['/Devices/Device'] + sertmpls = ['/Devices/Server/Device-replacement', '/Devices/Device-addition'] + lintmpls = ['/Devices/Server/Linux/Device', '/Devices/Device-addition'] + + self.assertEqual(devtmpls, devtemps) + self.assertEqual(devtmpls, sertemps) + self.assertEqual(devtmpls, lintemps) + def test_suite(): from unittest import TestSuite, makeSuite suite = TestSuite() diff --git a/Products/ZenRRD/RRDDaemon.py b/Products/ZenRRD/RRDDaemon.py index 195b28ddb8..0a21204ca4 100644 --- a/Products/ZenRRD/RRDDaemon.py +++ b/Products/ZenRRD/RRDDaemon.py @@ -18,7 +18,6 @@ from Products.ZenEvents import Event from Products.ZenHub.PBDaemon import FakeRemote, PBDaemon -from Products.ZenUtils.Utils import unused from .Thresholds import Thresholds @@ -52,8 +51,8 @@ def __init__(self, name, noopts=False): @param noopts: process command-line arguments? @type noopts: boolean """ + super(RRDDaemon, self).__init__(noopts, name=name) self.events = [] - PBDaemon.__init__(self, noopts, name=name) self.thresholds = Thresholds() def getDevicePingIssues(self): @@ -75,16 +74,6 @@ def remote_setPropertyItems(self, items): self.log.debug("Async update of collection properties") self.setPropertyItems(items) - def remote_updateDeviceList(self, devices): - """ - Callable from zenhub. 
- - @param devices: list of devices (unused) - @type devices: list - """ - unused(devices) - self.log.debug("Async update of device list") - def setPropertyItems(self, items): """ Set zProperties @@ -113,7 +102,7 @@ def buildOptions(self): """ Command-line options to add """ - PBDaemon.buildOptions(self) + super(RRDDaemon, self).buildOptions() self.parser.add_option( "-d", "--device", diff --git a/Products/ZenRRD/runner.py b/Products/ZenRRD/runner.py index 400586729f..c008ab5be2 100644 --- a/Products/ZenRRD/runner.py +++ b/Products/ZenRRD/runner.py @@ -211,8 +211,8 @@ def __init__(self, proxy, client): self.manageIp = self.proxy.manageIp self.port = self.proxy.zCommandPort - _username = self.proxy.zCommandUsername - _password = self.proxy.zCommandPassword + _username = self.proxy.zCommandUsername or "" + _password = self.proxy.zCommandPassword or "" _loginTimeout = self.proxy.zCommandLoginTimeout _commandTimeout = self.proxy.zCommandCommandTimeout _keyPath = self.proxy.zKeyPath diff --git a/Products/ZenRRD/zencommand.py b/Products/ZenRRD/zencommand.py index fb6fe49dfd..f5fd9e7fd3 100755 --- a/Products/ZenRRD/zencommand.py +++ b/Products/ZenRRD/zencommand.py @@ -686,7 +686,8 @@ def _parseResults(self, resultList): for success, (command, result) in resultList: parse = self._parse_result if success else self._parse_error datasources = self._commandMap.get(command) - parsed_results.extend(parse(datasources, result)) + if datasources: + parsed_results.extend(parse(datasources, result)) return parsed_results def _timeout_error_result(self, datasource): diff --git a/Products/ZenRRD/zenperfsnmp.py b/Products/ZenRRD/zenperfsnmp.py index 3f5981f334..874f2f5b07 100755 --- a/Products/ZenRRD/zenperfsnmp.py +++ b/Products/ZenRRD/zenperfsnmp.py @@ -21,8 +21,10 @@ import zope.interface -from pynetsnmp.netsnmp import SnmpTimeoutError, SnmpError -from pynetsnmp.twistedsnmp import snmpprotocol, Snmpv3Error +from pynetsnmp import oids +from pynetsnmp.netsnmp import SnmpTimeoutError, NetSnmpError +from pynetsnmp.twistedsnmp import snmpprotocol +from pynetsnmp.errors import SnmpUsmError, SnmpUsmStatsError from twisted.internet import defer, error from Products.ZenCollector.daemon import CollectorDaemon @@ -133,6 +135,17 @@ class StopTask(Exception): STATUS_EVENT = {"eventClass": Status_Snmp, "eventGroup": "SnmpTest"} +_usm_stat_message = { + oids.WrongDigest: ( + "invalid zSnmpAuthPassphrase/zSnmpAuthProtocol properties" + ), + oids.DecryptionError: ( + "invalid zSnmpPrivPassphrase/zSnmpPrivProtocol properties" + ), + oids.UnknownUserName: "invalid zSnmpSecurityName property", + oids.UnknownSecurityLevel: "invalid zSnmp* properties", +} + @zope.interface.implementer(IScheduledTask) class SnmpPerformanceCollectionTask(BaseTask): @@ -189,6 +202,7 @@ def __init__( self._snmpProxy = None self._snmpConnInfo = self._device.snmpConnInfo + log.info("SNMP info summary %s", self._snmpConnInfo.summary()) self._oids = self._device.oids self._oidDeque = deque(self._oids.keys()) self._good_oids = set() @@ -258,7 +272,7 @@ def _checkTaskTime(self): def getOidsSet(self): if self._chosenOid: - return set(oid for oid in self._oids if self._chosenOid in oid) + return {oid for oid in self._oids if self._chosenOid in oid} else: return set(self._oids) @@ -366,7 +380,7 @@ def _fetchPerfChunk(self, oid_chunk): self._snmpConnInfo.zSnmpTimeout, self._snmpConnInfo.zSnmpTries, ) - except (error.TimeoutError, SnmpTimeoutError) as e: + except (error.TimeoutError, SnmpTimeoutError, SnmpUsmStatsError): raise except Exception as e: 
log.exception( @@ -392,31 +406,19 @@ # remove them from good oids. These will run in single mode so # we can figure out which ones are good or bad. if len(oid_chunk) == 1: - self.remove_from_good_oids(oid_chunk) - self._addBadOids(oid_chunk) - log.warn( - "No return result, marking as bad oid: {%s} {%s}", - self.configId, - oid_chunk, - ) + self._mark_bad_oids(oid_chunk) else: - log.warn( - "No return result, will run in separately to determine " - "which oids are valid: {%s} {%s}", + log.warning( + "No results returned, will run separately to " + "determine which oids are valid device=%s oids=%s", self.configId, oid_chunk, ) - self.remove_from_good_oids(oid_chunk) + self._remove_from_good_oids(oid_chunk) else: for oid in oid_chunk: if oid not in update: - log.error( - "SNMP get did not return result: %s %s", - self.configId, - oid, - ) - self.remove_from_good_oids([oid]) - self._addBadOids([oid]) + self._mark_bad_oids([oid]) self.state = SnmpPerformanceCollectionTask.STATE_STORE_PERF try: for oid, value in update.items(): @@ -431,12 +433,7 @@ def _fetchPerfChunk(self, oid_chunk): # We should always get something useful back if value == "" or value is None: if oid not in self._bad_oids: - log.error( - "SNMP get returned empty value: %s %s", - self.configId, - oid, - ) - self._addBadOids([oid]) + self._mark_bad_oids([oid]) continue self._good_oids.add(oid) @@ -540,7 +537,7 @@ def _doCollectOids(self, ignored): except StopTask as e: taskStopped = True self._stoppedTaskCount += 1 - log.warn( + log.warning( "Device %s [%s] Task stopped collecting to avoid " "exceeding cycle interval - %s", self._devId, @@ -550,12 +547,20 @@ self._logOidsNotCollected( "Task was stopped so as not exceed cycle interval" ) - except (error.TimeoutError, SnmpTimeoutError) as e: + except (error.TimeoutError, SnmpTimeoutError): log.debug( "Device %s [%s] snmp timed out ", self._devId, self._manageIp, ) + except SnmpUsmStatsError as ex: + message = _usm_stat_message.get(ex.oid) + if not message: + # The UnknownSecurityLevel message also works as a + # generic USM stats error message. 
+ message = _usm_stat_message.get(oids.UnknownSecurityLevel) + log.error("%s device=%s", message, self._devId) + raise if self._snmpConnInfo.zSnmpVer == "v3": self._sendStatusEvent( @@ -635,7 +640,7 @@ def _doCollectOids(self, ignored): ) except CycleExceeded as e: self._cycleExceededCount += 1 - log.warn( + log.warning( "Device %s [%s] scan stopped because time exceeded " "cycle interval, %s", self._devId, @@ -647,44 +652,57 @@ def _doCollectOids(self, ignored): "Scan stopped; Collection time exceeded interval - %s" % e, eventKey="interval_exceeded", ) - except Snmpv3Error as e: + except SnmpUsmError as e: self._logOidsNotCollected("of %s" % (e,)) self._snmpV3ErrorCount += 1 - summary = "Cannot connect to SNMP agent on {0._devId}: {1}".format( - self, e + log.error( + "cannot connect to SNMP agent device=%s error=%s", + self.configId, + e, + ) + self._sendStatusEvent( + "Cannot connect to SNMP agent on {0._devId}: {1}".format( + self, e + ), + eventKey="snmp_v3_error", ) - log.error("%s on %s", summary, self.configId) - self._sendStatusEvent(summary, eventKey="snmp_v3_error") - except SnmpError as e: + except NetSnmpError as e: self._logOidsNotCollected("of %s" % (e,)) - summary = "Cannot connect to SNMP agent on {0._devId}: {1}".format( - self, e + log.error( + "cannot connect to SNMP agent device=%s error=%s", + self.configId, + e, + ) + self._sendStatusEvent( + "Cannot connect to SNMP agent on {0._devId}: {1}".format( + self, e + ), + eventKey="snmp_error", ) - log.error("%s on %s", summary, self.configId) - self._sendStatusEvent(summary, eventKey="snmp_error") finally: self._logTaskOidInfo(previous_bad_oids) - def remove_from_good_oids(self, oids): + def _remove_from_good_oids(self, oids): self._good_oids.difference_update(oids) - def _addBadOids(self, oids): + def _mark_bad_oids(self, oids): """ Report any bad OIDs and then track the OID so we don't generate any further errors. 
""" # make sure oids aren't in good set - self.remove_from_good_oids(oids) + self._remove_from_good_oids(oids) for oid in oids: - if oid in self._oids: - self._bad_oids.add(oid) - names = [dp[0] for dp in self._oids[oid]] - summary = "Error reading value for %s (%s) on %s" % ( - names, - oid, - self._devId, - ) - log.warn(summary) + if oid in self._bad_oids or oid not in self._oids: + continue + self._bad_oids.add(oid) + names = [dp[0] for dp in self._oids[oid]] + log.warning( + "no result for oid device=%s oid=%s names=%s", + self._devId, + oid, + names, + ) def _finished(self, result): """ @@ -697,12 +715,12 @@ def _finished(self, result): try: self._close() except Exception as ex: - log.warn("Failed to close device %s: error %s", self._devId, ex) + log.warning("Failed to close device %s: error %s", self._devId, ex) doTask_end = datetime.now() duration = doTask_end - self._doTask_start if duration > timedelta(seconds=self._device.cycleInterval): - log.warn( + log.warning( "Collection for %s took %s seconds; " "cycle interval is %s seconds.", self.configId, @@ -792,7 +810,7 @@ def _logOidsNotCollected(self, reason): oidsNotCollected, ) - def _connect(self, result=None): + def _connect(self, ignored=None): """ Create a connection to the remote device """ @@ -801,10 +819,24 @@ def _connect(self, result=None): self._snmpProxy is None or self._snmpProxy._snmpConnInfo != self._snmpConnInfo ): - self._snmpProxy = self._snmpConnInfo.createSession( - protocol=self._snmpPort.protocol, allowCache=True - ) - self._snmpProxy.open() + try: + self._snmpProxy = self._snmpConnInfo.createSession( + protocol=self._snmpPort.protocol + ) + self._snmpProxy.open() + self._sendStatusEvent( + "SNMP config error cleared", + eventKey="snmp_config_error", + severity=Event.Clear, + ) + except Exception as ex: + self._close() + log.error("failed to create SNMP session: %s", ex) + self._sendStatusEvent( + "SNMP config error: {}".format(ex), + eventKey="snmp_config_error", + ) + raise return self._snmpProxy def _close(self): diff --git a/Products/ZenRRD/zenprocess.py b/Products/ZenRRD/zenprocess.py index 07e610a868..04ee1255b0 100755 --- a/Products/ZenRRD/zenprocess.py +++ b/Products/ZenRRD/zenprocess.py @@ -20,10 +20,11 @@ from pprint import pformat +import six import zope.component import zope.interface -from pynetsnmp.twistedsnmp import Snmpv3Error +from pynetsnmp.twistedsnmp import SnmpUsmError from twisted.internet import defer, error from Products.ZenCollector.daemon import CollectorDaemon @@ -59,7 +60,9 @@ unused(DeviceProxy, ProcessProxy, SnmpConnInfo) -log = logging.getLogger("zen.zenprocess") +COLLECTOR_NAME = "zenprocess" + +log = logging.getLogger("zen.{}".format(COLLECTOR_NAME)) # HOST-RESOURCES-MIB OIDs used HOSTROOT = ".1.3.6.1.2.1.25" @@ -76,6 +79,12 @@ PROC_SCAN_ERROR = "Unable to read processes on device %s" +RESOURCE_MIB = "resource_mib" +SNMP_CONFIG_ERROR = "snmp_config_error" +TABLE_SCAN_TIMEOUT = "table_scan_timeout" +TABLE_SCAN_V3_ERROR = "table_scan_v3_error" +PROCESS_STATUS = "process_status" + class HostResourceMIBException(Exception): pass @@ -91,7 +100,7 @@ def __init__(self): Constructs a new ZenProcessPreferences instance and provide default values for needed attributes. 
""" - self.collectorName = "zenprocess" + self.collectorName = COLLECTOR_NAME self.configCycleInterval = 20 # minutes # will be updated based on Performance Config property of same name @@ -361,7 +370,13 @@ def __init__( self._dataService = zope.component.queryUtility(IDataService) self._eventService = zope.component.queryUtility(IEventService) self._preferences = zope.component.queryUtility( - ICollectorPreferences, "zenprocess" + ICollectorPreferences, COLLECTOR_NAME + ) + self._snmpStatusEvent = dict( + self.statusEvent, + agent=COLLECTOR_NAME, + device=self._devId, + eventClass=Status_Snmp, ) self.snmpProxy = None self.snmpConnInfo = self._device.snmpConnInfo @@ -384,9 +399,21 @@ def doTask(self): """ try: # see if we need to connect first before doing any collection - self.openProxy() - log.debug("Opened proxy to %s [%s]", self._devId, self._manageIp) - yield self._collectCallback() + try: + self.openProxy() + self._clearSnmpError( + "SNMP config error cleared", SNMP_CONFIG_ERROR + ) + except Exception as ex: + log.error("failed to create SNMP session: %s", ex) + self._sendSnmpError( + "SNMP config error: {}".format(ex), SNMP_CONFIG_ERROR + ) + else: + log.debug( + "opened proxy to %s [%s]", self._devId, self._manageIp + ) + yield self._collectCallback() finally: self._finished() @@ -406,26 +433,26 @@ def _collectCallback(self): tableResult = yield self._getTables(tables) summary = "Process table up for device %s" % self._devId self._clearSnmpError( - "%s - timeout cleared" % summary, "table_scan_timeout" + "%s - timeout cleared" % summary, TABLE_SCAN_TIMEOUT ) if self.snmpConnInfo.zSnmpVer == "v3": self._clearSnmpError( - "%s - v3 error cleared" % summary, "table_scan_v3_error" + "%s - v3 error cleared" % summary, TABLE_SCAN_V3_ERROR ) processes = self._parseProcessNames(tableResult) - self._clearSnmpError(summary, "resource_mib") + self._clearSnmpError(summary, RESOURCE_MIB) self._deviceStats.update(self._device) processStatuses = self._determineProcessStatus(processes) self._sendProcessEvents(processStatuses) - self._clearSnmpError(summary) + self._clearSnmpError(summary, PROCESS_STATUS) yield self._fetchPerf() log.debug( "Device %s [%s] scanned successfully", self._devId, self._manageIp, ) - except HostResourceMIBException as e: + except HostResourceMIBException: summary = ( "Device %s does not publish HOST-RESOURCES-MIB" % self._devId ) @@ -434,29 +461,28 @@ def _collectCallback(self): NAMETABLE, ) log.warn(summary) - self._sendSnmpError(summary, "resource_mib", resolution=resolution) - - except error.TimeoutError as e: + self._sendSnmpError(summary, RESOURCE_MIB, resolution=resolution) + except error.TimeoutError: log.debug("Timeout fetching tables on device %s", self._devId) self._sendSnmpError( - "%s; Timeout on device" % PROC_SCAN_ERROR % self._devId, - "table_scan_timeout", + "%s; Timeout on device" % (PROC_SCAN_ERROR % self._devId,), + TABLE_SCAN_TIMEOUT, ) - except Snmpv3Error as e: + except SnmpUsmError as e: msg = ( - "Cannot connect to SNMP agent on {0._devId}: {1.value}".format( - self, str(e) + "Cannot connect to SNMP agent on {0._devId}: {1}".format( + self, e ) ) log.debug(msg) self._sendSnmpError( "%s; %s" % (PROC_SCAN_ERROR % self._devId, msg), - "table_scan_v3_error", + TABLE_SCAN_V3_ERROR, ) except Exception as e: log.exception("Unexpected Error on device %s", self._devId) msg = "%s; error: %s" % (PROC_SCAN_ERROR % self._devId, e) - self._sendSnmpError(msg) + self._sendSnmpError(msg, PROCESS_STATUS) def _finished(self): """ @@ -644,7 +670,7 @@ def 
_sendProcessEvents(self, results): self.sendMissingProcsEvents(missing) # Store the total number of each process into an RRD - pidCounts = dict((p, 0) for p in self._deviceStats.processStats) + pidCounts = {p: 0 for p in self._deviceStats.processStats} for procStat in self._deviceStats.monitoredProcs: # monitoredProcs is determined from the current pids in @@ -747,7 +773,7 @@ def _fetchPerf(self): ) result = yield self._get(oidChunk) results.update(result) - except (error.TimeoutError, Snmpv3Error) as e: + except (error.TimeoutError, SnmpUsmError) as e: log.debug("error reading oid(s) %s - %s", oidChunk, e) singleOids.update(oidChunk) oidsToTest = [] @@ -832,8 +858,12 @@ def openProxy(self): self.snmpProxy is None or self.snmpProxy.snmpConnInfo != self.snmpConnInfo ): - self.snmpProxy = self.snmpConnInfo.createSession() - self.snmpProxy.open() + try: + self.snmpProxy = self.snmpConnInfo.createSession() + self.snmpProxy.open() + except Exception: + self.snmpProxy = None + raise def _close(self): """ @@ -860,19 +890,14 @@ def _showProcessList(self, procs): "#===== Processes on %s:\n%s", device_name, "\n".join(proc_list) ) - def _sendSnmpError(self, message, eventKey=None, **kwargs): - event = self.statusEvent.copy() + def _sendSnmpError(self, message, eventKey, **kwargs): + event = self._snmpStatusEvent.copy() event.update(kwargs) self._eventService.sendEvent( - event, - eventClass=Status_Snmp, - device=self._devId, - severity=Event.Error, - eventKey=eventKey, - summary=message, + event, eventKey=eventKey, severity=Event.Error, summary=message ) - def _clearSnmpError(self, message, eventKey=None): + def _clearSnmpError(self, message, eventKey): """ Send an event to clear other events. @@ -880,13 +905,10 @@ def _clearSnmpError(self, message, eventKey=None): @type message: string """ self._eventService.sendEvent( - self.statusEvent, - eventClass=Status_Snmp, - device=self._devId, - summary=message, - agent="zenprocess", + self._snmpStatusEvent, eventKey=eventKey, severity=Event.Clear, + summary=message, ) def _save(self, pidName, statName, value, rrdType, min="U"): @@ -922,22 +944,20 @@ def _save(self, pidName, statName, value, rrdType, min="U"): trace_info = traceback.format_exc() self._eventService.sendEvent( - dict( - dedupid="%s|%s" - % ( - self._preferences.options.monitor, - "Metric write failure", + { + "dedupid": "{0.options.monitor}|{1}".format( + self._preferences, "Metric write failure" ), - severity=Event.Critical, - device=self._preferences.options.monitor, - eventClass=Status_Perf, - component="METRIC", - pidName=pidName, - statName=statName, - message=message, - traceback=trace_info, - summary=summary, - ) + "severity": Event.Critical, + "device": self._preferences.options.monitor, + "eventClass": Status_Perf, + "component": "METRIC", + "pidName": pidName, + "statName": statName, + "message": message, + "traceback": trace_info, + "summary": summary, + } ) @@ -982,7 +1002,7 @@ def extract(dictionary, oid, value): path = paths.get(pid, "") if path and path.find("\\") == -1: name = path - arg = unicode(args.get(pid, ""), errors="replace") + arg = six.text_type(args.get(pid, ""), errors="replace") procs.append((pid, (name + " " + arg).strip())) return procs diff --git a/Products/ZenRelations/ImportRM.py b/Products/ZenRelations/ImportRM.py index e079f7c76e..f93d0377b1 100644 --- a/Products/ZenRelations/ImportRM.py +++ b/Products/ZenRelations/ImportRM.py @@ -652,7 +652,7 @@ def __init__(self): self.infile = "" self.noCommit = True self.noindex = True - self.dataroot = "/zport/dmd" + 
self.zodb_dataroot = "/zport/dmd" class NoLoginImportRM(ImportRM): diff --git a/Products/ZenRelations/PrimaryPathObjectManager.py b/Products/ZenRelations/PrimaryPathObjectManager.py index c243a94dcb..a6fd22ba8a 100644 --- a/Products/ZenRelations/PrimaryPathObjectManager.py +++ b/Products/ZenRelations/PrimaryPathObjectManager.py @@ -94,9 +94,7 @@ class PrimaryPathObjectManager( PrimaryPathManager, App.Undo.UndoSupport, ): - """ - PrimaryPathObjectManager with basic Zope persistent classes. - """ + """PrimaryPathObjectManager with basic Zope persistent classes.""" manage_options = ( ObjectManager.manage_options diff --git a/Products/ZenRelations/ToManyContRelationship.py b/Products/ZenRelations/ToManyContRelationship.py index 15550bed7e..738f694280 100644 --- a/Products/ZenRelations/ToManyContRelationship.py +++ b/Products/ZenRelations/ToManyContRelationship.py @@ -247,7 +247,28 @@ def objectValues(self, spec=None): def objectValuesGen(self): """Generator that returns all related objects.""" - return (obj.__of__(self) for obj in self._objects.values()) + for obj in self._objects.values(): + try: + yield obj.__of__(self) + except Exception: + # If the object does not have an `__of__` method, don't + # trust that it has a `getPrimaryId` method. So, + # use `getPrimaryId` if it exists, otherwise, use the + # type of the object itself. + if hasattr(obj, "getPrimaryId"): + key = "primary-id" + value = obj.getPrimaryId() + else: + # Getting the type of an object that's been wrapped + # in an acquisition wrapper isn't helpful, so unwrap + # it first, but don't trust that `aq_base` will + # succeed. + key = "type" + try: + value = type(aq_base(obj)) + except Exception: + value = type(obj) + log.exception("failed to wrap object %s=%s", key, value) def objectItems(self, spec=None): """over ride to only return owned objects for many to many rel""" diff --git a/Products/ZenRelations/ToManyRelationship.py b/Products/ZenRelations/ToManyRelationship.py index 40d63fad8b..b12e79a750 100644 --- a/Products/ZenRelations/ToManyRelationship.py +++ b/Products/ZenRelations/ToManyRelationship.py @@ -8,7 +8,6 @@ ############################################################################## import logging -import sys from AccessControl import ClassSecurityInfo from AccessControl.class_init import InitializeClass @@ -17,7 +16,6 @@ from persistent.list import PersistentList from zExceptions import NotFound -from Products.ZenUtils.tbdetail import log_tb from Products.ZenUtils.Utils import getObjByPath, unused from .Exceptions import ObjectNotFound, RelationshipExistsError, zenmarker @@ -116,14 +114,15 @@ def _remoteRemove(self, obj=None): objs = self.objectValuesAll() remoteName = self.remoteName() for obj in objs: - rel = getattr(obj, remoteName) + remoteRel = getattr(obj, remoteName) try: - rel._remove(self.__primary_parent__) + remoteRel._remove(self.__primary_parent__) except ObjectNotFound: - message = log_tb(sys.exc_info()) - log.error( - 'Remote remove failed. Run "zenchkrels -r -x1". 
%s', - message, + log.debug( + "remote relation already removed " + "obj=%s remote-relation=%s", + self.__primary_parent__.getPrimaryId(), + remoteRel.getPrimaryId(), ) def _setObject(self, id, object, roles=None, user=None, set_owner=1): @@ -211,7 +210,7 @@ def _getCopy(self, container): rel.addRelation(robj) return rel - def exportXml(self, ofile, ignorerels=[]): + def exportXml(self, ofile, ignorerels=()): """Return an xml representation of a ToManyRelationship /Systems/OOL/Mail diff --git a/Products/ZenRelations/ToOneRelationship.py b/Products/ZenRelations/ToOneRelationship.py index 4e4a16588f..3ff71cfd44 100644 --- a/Products/ZenRelations/ToOneRelationship.py +++ b/Products/ZenRelations/ToOneRelationship.py @@ -14,7 +14,6 @@ """ import logging -import sys from AccessControl import ClassSecurityInfo from AccessControl.class_init import InitializeClass @@ -23,7 +22,6 @@ from App.special_dtml import DTMLFile from zExceptions import NotFound -from Products.ZenUtils.tbdetail import log_tb from Products.ZenUtils.Utils import getObjByPath from .Exceptions import ( @@ -126,10 +124,11 @@ def _remoteRemove(self, obj=None): try: remoteRel._remove(self.__primary_parent__) except ObjectNotFound: - message = log_tb(sys.exc_info()) - log.error( - 'Remote remove failed. Run "zenchkrels -r -x1". %s', - message, + log.debug( + "remote relation already removed " + "obj=%s remote-relation=%s", + self.__primary_parent__.getPrimaryId(), + remoteRel.getPrimaryId(), ) security.declareProtected("View", "getRelatedId") @@ -150,6 +149,9 @@ def _getCopy(self, container): rel = self.__class__(self.id) rel.__primary_parent__ = container rel = rel.__of__(container) + norelcopy = getattr(self, 'zNoRelationshipCopy', []) + if self.id in norelcopy: + return rel if self.remoteTypeName() == "ToMany" and self.obj: rel.addRelation(self.obj) return rel @@ -191,7 +193,7 @@ def getPrimaryHref(self): """Return the primary URL for our related object.""" return self.obj.getPrimaryUrlPath() - def exportXml(self, ofile, ignorerels=[]): + def exportXml(self, ofile, ignorerels=()): """return an xml representation of a ToOneRelationship /Monitors/Cricket/crk0.srv.hcvlny.cv.net @@ -200,10 +202,18 @@ def exportXml(self, ofile, ignorerels=[]): if not self.obj or self.remoteType() == ToManyCont: return - ofile.write( - "\n" - % (self.id, self.obj.getPrimaryId()) - ) + try: + ofile.write( + "\n" + % (self.id, self.obj.getPrimaryId()) + ) + except Exception: + log.exception( + "skipping %s object-type=%s object-id=%s", + self.id, + self.obj.__class__.__module__, + getattr(self.obj, "id", ""), + ) def checkRelation(self, repair=False): """Check to make sure that relationship bidirectionality is ok.""" diff --git a/Products/ZenRelations/ZenPropertyManager.py b/Products/ZenRelations/ZenPropertyManager.py index 1f88de36bf..f14c14a75d 100644 --- a/Products/ZenRelations/ZenPropertyManager.py +++ b/Products/ZenRelations/ZenPropertyManager.py @@ -25,6 +25,7 @@ ZEN_ZPROPERTIES_EDIT, ZEN_ZPROPERTIES_VIEW, ) +from Products.ZenUtils.snmp import authentication_protocols, privacy_protocols from Products.ZenUtils.Utils import unused, getDisplayType from Products.ZenWidgets.interfaces import IMessageSender @@ -44,6 +45,38 @@ # define all the zProperties. The values are set on dmd.Devices in the # buildDeviceTreeProperties of DeviceClass Z_PROPERTIES = [ + # Config Cache properties + ( + "zDeviceConfigBuildTimeout", + 7200, + "int", + "Device configuration build timeout", + "The number of seconds before timing out a device configuration build." 
+ ), + ( + "zDeviceConfigPendingTimeout", + 7200, + "int", + "Device configuration build queued timeout", + "The number of seconds a device configuration build may be queued " + "before a timeout." + ), + ( + "zDeviceConfigTTL", + 43200, + "int", + "Device configuration expiration", + "The maximum number of seconds to wait before rebuilding a " + "device configuration." + ), + ( + "zDeviceConfigMinimumTTL", + 0, + "int", + "Device configuration pre-expiration window", + "The number of seconds the configuration is protected " + "from being rebuilt." + ), # zPythonClass maps device class to python classs (separate from device # class name) ( @@ -156,15 +189,15 @@ "zSnmpAuthType", "", "string", - "SNMP Auth Type", - 'Use "MD5" or "SHA" signatures to authenticate SNMP requests', + "SNMP Authentication Protocol", + 'The cryptographic protocol used to authenticate SNMP requests.', ), ( "zSnmpPrivType", "", "string", - "SNMP Priv Type", - '"DES" or "AES" cryptographic algorithms.', + "SNMP Privacy Protocol", + 'The cryptographic protocol used to encrypt SNMP packets.', ), ( "zSnmpContext", @@ -564,6 +597,14 @@ "Used by ZenPack authors to denote which zProperties comprise " "the credentials for this device class.", ), + ( + "zNoRelationshipCopy", + ["pack"], + "lines", + "Relations to skip during copying", + "Determines which relations should not be built during copying." + ) + ] @@ -668,18 +709,18 @@ class ZenPropertyManager(object, PropertyManager): the actual value the popup will have. It also has management for zenProperties which are properties that can be - inherited long the acquision chain. All properties are for a branch are - defined on a "root node" specified by the function which must be returned - by the function getZenRootNode that should be over ridden in a sub class. - Prperties can then be added further "down" the aq_chain by calling - setZenProperty on any contained node. + inherited along the acquisition chain. All properties for a branch + are defined on a "root node" specified by the function getZenRootNode, + which should be overridden in a subclass. Properties can then be added + further "down" the aq_chain by calling setZenProperty on any contained + node. ZenProperties all have the same prefix which is defined by iszprop this can be overridden in a subclass. ZenPropertyManager overrides getProperty and getPropertyType from PropertyManager to support acquisition. If you want to query an object - about a property, but do not want it to search the acquistion chain then + about a property, but do not want it to search the acquisition chain then use the super classes method or aq_base. Example: # acquires property from dmd.Devices @@ -692,7 +733,7 @@ class ZenPropertyManager(object, PropertyManager): aq_base(dmd.Devices.Server).getProperty('zSnmpCommunity') The properties are stored as attributes which is convenient, but can be - confusing. Attribute access always uses acquistion. Setting an + confusing. Attribute access always uses acquisition. Setting an attribute, will not add it to the list of properties, so subsquent calls to hasProperty or getProperty won't return it. 
@@ -1012,9 +1053,22 @@ def deleteZenProperty(self, propname=None, REQUEST=None): security.declareProtected(ZEN_ZPROPERTIES_VIEW, "zenPropertyOptions") def zenPropertyOptions(self, propname): - """Provide a set of default options for a ZProperty.""" - unused(propname) - return [] + """ + Returns a list of possible options for a given zProperty + """ + if propname == "zCollectorPlugins": + from Products.DataCollector.Plugins import loadPlugins + + return tuple(sorted(p.pluginName for p in loadPlugins(self.dmd))) + if propname == "zCommandProtocol": + return ("ssh", "telnet") + if propname == "zSnmpVer": + return ("v1", "v2c", "v3") + if propname == "zSnmpAuthType": + return ("",) + authentication_protocols + if propname == "zSnmpPrivType": + return ("",) + privacy_protocols + return () security.declareProtected(ZEN_ZPROPERTIES_VIEW, "isLocal") @@ -1077,7 +1131,7 @@ def getProperty(self, id, d=None): security.declareProtected(ZEN_ZPROPERTIES_VIEW, "getPropertyType") def getPropertyType(self, id): - """Overrides methods from PropertyManager to support acquistion.""" + """Overrides methods from PropertyManager to support acquisition.""" ob = self._findParentWithProperty(id) if ob is not None: return PropertyManager.getPropertyType(ob, id) diff --git a/Products/ZenRelations/__init__.py b/Products/ZenRelations/__init__.py index 388fdf1e95..2e7538a438 100644 --- a/Products/ZenRelations/__init__.py +++ b/Products/ZenRelations/__init__.py @@ -7,12 +7,6 @@ # ############################################################################## -__doc__ = """__init__ - -Initialize the RelationshipManager Product - -""" - import logging from .RelationshipManager import ( @@ -20,20 +14,20 @@ manage_addRelationshipManager, RelationshipManager, ) -from .ToOneRelationship import ( - addToOneRelationship, - manage_addToOneRelationship, - ToOneRelationship, +from .ToManyContRelationship import ( + addToManyContRelationship, + manage_addToManyContRelationship, + ToManyContRelationship, ) from .ToManyRelationship import ( addToManyRelationship, manage_addToManyRelationship, ToManyRelationship, ) -from .ToManyContRelationship import ( - addToManyContRelationship, - manage_addToManyContRelationship, - ToManyContRelationship, +from .ToOneRelationship import ( + addToOneRelationship, + manage_addToOneRelationship, + ToOneRelationship, ) from .ZenPropertyManager import setDescriptors diff --git a/Products/ZenRelations/checkrel.py b/Products/ZenRelations/checkrel.py index a661e163b8..2ecfa61de7 100644 --- a/Products/ZenRelations/checkrel.py +++ b/Products/ZenRelations/checkrel.py @@ -7,6 +7,8 @@ # ############################################################################## +from __future__ import print_function + import logging import sys @@ -39,7 +41,7 @@ def checkRelationshipSchema(cls, baseModule): for relname, rel in cls._relations: try: remoteClass = importClass(rel.remoteClass, None) - except AttributeError as e: + except AttributeError: logging.critical( "RemoteClass '%s' from '%s.%s' not found", rel.remoteClass, @@ -49,7 +51,7 @@ def checkRelationshipSchema(cls, baseModule): continue try: rschema = lookupSchema(remoteClass, rel.remoteName) - except ZenSchemaError as e: + except ZenSchemaError: logging.critical( "Inverse def '%s' for '%s.%s' not found on '%s'", rel.remoteName, @@ -100,7 +102,11 @@ def checkRelationshipSchema(cls, baseModule): baseModule = None if len(sys.argv) > 1: - baseModule = sys.argv[1] + baseModule = sys.argv[1].strip() + +if not baseModule: + print("An argument is required", file=sys.stderr) 
+ sys.exit(1) classList = importClasses( basemodule=baseModule, skipnames=("ZentinelPortal", "ZDeviceLoader") diff --git a/Products/ZenRelations/tests/testEvents.py b/Products/ZenRelations/tests/testEvents.py index 5c4c176de4..326996b6b5 100644 --- a/Products/ZenRelations/tests/testEvents.py +++ b/Products/ZenRelations/tests/testEvents.py @@ -24,6 +24,7 @@ BaseTestCase, init_model_catalog_for_tests, ) +from Products.ZenUtils.ZenDocTest import load_unittest_site class EventLogger(object): @@ -54,7 +55,6 @@ class ITestItem(interface.Interface): @interface.implementer(ITestItem, IItem) class TestItem(RelationshipManager): - def __init__(self, id): self.id = id self.buildRelations() @@ -96,7 +96,7 @@ class EventLayer(ZopeLite): def setUp(cls): import Products # noqa F401 - zcml.load_site(force=True) + load_unittest_site(force=True) setHooks() # Register Model Catalog related stuff diff --git a/Products/ZenRelations/zPropertyCategory.py b/Products/ZenRelations/zPropertyCategory.py index 77bf4adefe..6f0b77a669 100644 --- a/Products/ZenRelations/zPropertyCategory.py +++ b/Products/ZenRelations/zPropertyCategory.py @@ -42,6 +42,13 @@ "zCommandUsername": "zencommand", "zKeyPath": "zencommand", # + # Configuration Cache + # ------------------- + "zDeviceConfigTTL": "Config Cache", + "zDeviceConfigMinimumTTL": "Config Cache", + "zDeviceConfigBuildTimeout": "Config Cache", + "zDeviceConfigPendingTimeout": "Config Cache", + # # Misc # --------- "zDeviceTemplates": "Misc", @@ -54,6 +61,7 @@ "zPythonClass": "Misc", "zStatusConnectTimeout": "Misc", "zStatusEventClass": "Misc", + "zNoRelationshipCopy": "Misc", # # SNMP # ---------- diff --git a/Products/ZenReports/plugins/interface.py b/Products/ZenReports/plugins/interface.py index 8934f144f3..053512c1b3 100644 --- a/Products/ZenReports/plugins/interface.py +++ b/Products/ZenReports/plugins/interface.py @@ -18,7 +18,7 @@ class interface(AliasPlugin): - "The interface usage report" + """The interface usage report""" def getComponentPath(self): return "os/interfaces" @@ -52,7 +52,7 @@ def getColumns(self): Column( "tmp_ipAddress", PythonColumnHandler( - 'component.ipaddresses()[0].id if ' + "component.ipaddresses()[0].id if " 'len(component.ipaddresses()) == 1 else ""' ), ), @@ -96,7 +96,7 @@ def getCompositeColumns(self): Column( "totalBits", PythonColumnHandler( - '(input + output) * 8 if input is not None and ' + "(input + output) * 8 if input is not None and " 'output is not None else "N/A"' ), ), diff --git a/Products/ZenReports/tests/test_report_loader.py b/Products/ZenReports/tests/test_report_loader.py index 6a5488016b..ffb5a31b24 100644 --- a/Products/ZenReports/tests/test_report_loader.py +++ b/Products/ZenReports/tests/test_report_loader.py @@ -9,99 +9,112 @@ from Products.ZenTestCase.BaseTestCase import BaseTestCase -from mock import Mock, MagicMock, create_autospec, patch +from mock import Mock, create_autospec, patch from Products.ZenReports.ReportLoader import ( zenPath, - transaction, ReportLoader, - Report + Report, ) class ReportLoaderTest(BaseTestCase): - def setUp(self): - self.rp_load = ReportLoader() + self.rp_load = ReportLoader() def test_loadDatabase(self): - self.rp_load.loadAllReports = create_autospec(self.rp_load.loadAllReports) + self.rp_load.loadAllReports = create_autospec( + self.rp_load.loadAllReports + ) self.rp_load.loadDatabase() self.rp_load.loadAllReports.assert_called_once_with() @patch( - 'Products.ZenReports.ReportLoader.transaction.commit', - autospec=True, spec_set=True + 
"Products.ZenReports.ReportLoader.transaction.commit", + autospec=True, + spec_set=True, ) def test_loadAllReports(self, commit): - repdir = zenPath('Products/ZenReports', self.rp_load.options.dir) - self.rp_load.loadDirectory = create_autospec(self.rp_load.loadDirectory) + repdir = zenPath("Products/ZenReports", self.rp_load.options.dir) + self.rp_load.loadDirectory = create_autospec( + self.rp_load.loadDirectory + ) self.rp_load.loadAllReports() self.rp_load.loadDirectory.assert_called_once_with(repdir) commit.assert_called_once_with() - def test_loadAllReports_zp(self): + def test_loadAllReports_zp(self): self.rp_load.options.zenpack = True - self.rp_load.getZenPackDirs = create_autospec(self.rp_load.getZenPackDirs) + self.rp_load.getZenPackDirs = create_autospec( + self.rp_load.getZenPackDirs + ) self.rp_load.loadAllReports() - self.rp_load.getZenPackDirs.assert_called_once_with(self.rp_load.options.zenpack) + self.rp_load.getZenPackDirs.assert_called_once_with( + self.rp_load.options.zenpack + ) def test_getZenPackDirs(self): - zp_name = 'test_zp' - zp_path = '/path/to/test_zp' - zp_obj = Mock(id='test_zp') + zp_name = "test_zp" + zp_path = "/path/to/test_zp" + zp_obj = Mock(id="test_zp") zp_obj.path = Mock(return_value=zp_path) self.rp_load.dmd.ZenPackManager.packs = create_autospec( - self.rp_load.dmd.ZenPackManager.packs, - return_value=[zp_obj] + self.rp_load.dmd.ZenPackManager.packs, return_value=[zp_obj] ) - self.rp_load.options.dir = 'reports' - zp_dir_result = ['/path/to/test_zp/reports'] + self.rp_load.options.dir = "reports" + zp_dir_result = ["/path/to/test_zp/reports"] result = self.rp_load.getZenPackDirs(name=zp_name) self.assertEqual(result, zp_dir_result) self.assertIsInstance(result, list) self.assertEqual(len(result), 1) def test_getZenPackDirs_error(self): - zp_name = 'noname_zp' - zp_path = '/path/to/test_zp' - zp_obj = Mock(id='test_zp') + zp_name = "noname_zp" + zp_path = "/path/to/test_zp" + zp_obj = Mock(id="test_zp") zp_obj.path = Mock(return_value=zp_path) self.rp_load.dmd.ZenPackManager.packs = create_autospec( - self.rp_load.dmd.ZenPackManager.packs, - return_value=[zp_obj] + self.rp_load.dmd.ZenPackManager.packs, return_value=[zp_obj] ) - self.rp_load.options.dir = 'reports' - #set loglevel to 50(CRITICAL) this will remove error log - self.rp_load.log.setLevel('CRITICAL') + self.rp_load.options.dir = "reports" + # set loglevel to 50(CRITICAL) this will remove error log + self.rp_load.log.setLevel("CRITICAL") with self.assertRaises(SystemExit) as exc: self.rp_load.getZenPackDirs(name=zp_name) self.assertEqual(exc.exception.code, 1) @patch( - 'Products.ZenReports.ReportLoader.os.walk', - autospec=True, spec_set=True + "Products.ZenReports.ReportLoader.os.walk", + autospec=True, + spec_set=True, ) def test_reports(self, walk): - rp_dir = '/path/to/test_zp/reports/SomeReports' - os_walk_data = [(rp_dir, [], ['reportName.rpt'])] + rp_dir = "/path/to/test_zp/reports/SomeReports" + os_walk_data = [(rp_dir, [], ["reportName.rpt"])] walk.return_value = os_walk_data - ret_data = [('/SomeReports', - 'reportName', '/path/to/test_zp/reports/SomeReports/reportName.rpt' - )] + ret_data = [ + ( + "/SomeReports", + "reportName", + "/path/to/test_zp/reports/SomeReports/reportName.rpt", + ) + ] result = self.rp_load.reports(rp_dir) self.assertEqual(result, ret_data) - def test_unloadDirectory(self): - rp_dir = '/path/to/test_zp/reports/SomeReports' - orgpath = '/SomeReports' - rp_id = 'reportName' - report_data = [(orgpath, - rp_id, 
'/path/to/test_zp/Reports/SomeReports/reportName.rpt' - )] + rp_dir = "/path/to/test_zp/reports/SomeReports" + orgpath = "/SomeReports" + rp_id = "reportName" + report_data = [ + ( + orgpath, + rp_id, + "/path/to/test_zp/Reports/SomeReports/reportName.rpt", + ) + ] rorg = Mock(id=rp_id) - rorg_parent = Mock(id='Reports') + rorg_parent = Mock(id="Reports") setattr(rorg, rp_id, True) rorg._delObject = Mock() rorg.objectValues = Mock(return_value=False) @@ -111,23 +124,27 @@ def test_unloadDirectory(self): self.rp_load.unloadDirectory(repdir=rp_dir) - self.rp_load.dmd.Reports.createOrganizer.assert_called_once_with(orgpath) + self.rp_load.dmd.Reports.createOrganizer.assert_called_once_with( + orgpath + ) rorg._delObject.assert_called_with(rp_id) rorg.objectValues.assert_called_once_with() rorg.getPrimaryParent.assert_called_once_with() - def test_unloadDirectory_false(self): - '''test that _delObject method was not called - ''' - rp_dir = '/path/to/test_zp/reports/SomeReports' - orgpath = '/SomeReports' - rp_id = 'reportName' - report_data = [(orgpath, - rp_id, '/path/to/test_zp/Reports/SomeReports/reportName.rpt' - )] - rorg = Mock(id='Reports') - rorg_parent = Mock(id='Reports') + """test that _delObject method was not called""" + rp_dir = "/path/to/test_zp/reports/SomeReports" + orgpath = "/SomeReports" + rp_id = "reportName" + report_data = [ + ( + orgpath, + rp_id, + "/path/to/test_zp/Reports/SomeReports/reportName.rpt", + ) + ] + rorg = Mock(id="Reports") + rorg_parent = Mock(id="Reports") rorg._delObject = Mock() rorg.objectValues = Mock(return_value=False) rorg.getPrimaryParent = Mock(return_value=rorg_parent) @@ -139,17 +156,16 @@ def test_unloadDirectory_false(self): rorg._delObject.assert_not_called() - def test_loadDirectory_force(self): - full_path = '/path/to/test_zp/Reports/SomeReports/reportName.rpt' - rp_dir = '/path/to/test_zp/reports/SomeReports' - orgpath = '/SomeReports' - rp_id = 'reportName' + full_path = "/path/to/test_zp/Reports/SomeReports/reportName.rpt" + rp_dir = "/path/to/test_zp/reports/SomeReports" + orgpath = "/SomeReports" + rp_id = "reportName" report_data = [(orgpath, rp_id, full_path)] self.rp_load.options.force = True rorg = Mock() report = Mock() - #set that this report is not from zenpack + # set that this report is not from zenpack report.pack = Mock(return_value=False) setattr(rorg, rp_id, report) rorg._delObject = Mock() @@ -160,20 +176,22 @@ def test_loadDirectory_force(self): self.rp_load.loadDirectory(rp_dir) - self.rp_load.dmd.Reports.createOrganizer.assert_called_once_with(orgpath) + self.rp_load.dmd.Reports.createOrganizer.assert_called_once_with( + orgpath + ) rorg._delObject.assert_called_once_with(rp_id) self.rp_load.loadFile.assert_called_with(rorg, rp_id, full_path) - def test_loadDirectory(self): - '''test that _delObject method was not called and we didn't overwrite reports - ''' - full_path = '/path/to/test_zp/Reports/SomeReports/reportName.rpt' - rp_dir = '/path/to/test_zp/reports/SomeReports' - orgpath = '/SomeReports' - rp_id = 'reportName' + """ + Test that _delObject method was not called reports wasn't overwritten. 
+ """ + full_path = "/path/to/test_zp/Reports/SomeReports/reportName.rpt" + rp_dir = "/path/to/test_zp/reports/SomeReports" + orgpath = "/SomeReports" + rp_id = "reportName" report_data = [(orgpath, rp_id, full_path)] - #force option is False by default, this is for better clarity + # force option is False by default, this is for better clarity self.rp_load.options.force = False rorg = Mock() setattr(rorg, rp_id, True) @@ -188,15 +206,12 @@ def test_loadDirectory(self): rorg._delObject.assert_not_called() self.rp_load.loadFile.assert_not_called() - @patch( - '__builtin__.file', - autospec=True, spec_set=True - ) + @patch("__builtin__.file", autospec=True, spec_set=True) def test_loadFile(self, file_mock): - rp_name = 'reportName' - full_rp_path = '/path/to/test_zp/Reports/SomeReports/reportName.rpt' + rp_name = "reportName" + full_rp_path = "/path/to/test_zp/Reports/SomeReports/reportName.rpt" report_txt = "some report data" - #mock build in file method and its instance read method + # mock build in file method and its instance read method file_read = Mock() file_read.read = Mock(return_value=report_txt) file_mock.return_value = file_read @@ -206,4 +221,3 @@ def test_loadFile(self, file_mock): self.assertIsInstance(rp, Report) self.assertEqual(rp.id, rp_name) root._setObject.assert_called_once_with(rp_name, rp) - \ No newline at end of file diff --git a/Products/ZenStatus/nmap/util.py b/Products/ZenStatus/nmap/util.py index a93e40b0ff..a9e1f5aba2 100644 --- a/Products/ZenStatus/nmap/util.py +++ b/Products/ZenStatus/nmap/util.py @@ -24,7 +24,7 @@ MAX_NMAP_OVERHEAD = 0.5 # in seconds MIN_PING_TIMEOUT = 0.1 # in seconds -_NMAP_BINARY = "/usr/bin/nmap" +_NMAP_BINARY = "/opt/zenoss/bin/nmap" @defer.inlineCallbacks @@ -135,7 +135,7 @@ def executeNmapCmd( if log.isEnabledFor(logging.DEBUG): log.debug("executing nmap %s", " ".join(args)) args = ["-n", _NMAP_BINARY] + args - log.info("Executing /bin/sudo %s", " ".join(args)) + log.debug("Executing /bin/sudo %s", " ".join(args)) out, err, exitCode = yield utils.getProcessOutputAndValue( "/bin/sudo", args ) diff --git a/Products/ZenStatus/ping/CmdPingTask.py b/Products/ZenStatus/ping/CmdPingTask.py index fad7f149e2..0c2e65e0d1 100644 --- a/Products/ZenStatus/ping/CmdPingTask.py +++ b/Products/ZenStatus/ping/CmdPingTask.py @@ -52,7 +52,7 @@ def _detectPing(): log.info("ping6 not found in path") _PING_ARG_TEMPLATE = ( - "%(ping)s -n -s %(datalength)d -c 1 -t %(ttl)d -w %(timeout)f %(ip)s" + "%(ping)s -n -s %(datalength)d -c 1 -t %(ttl)d -w %(timeout)d %(ip)s" ) import platform @@ -61,7 +61,7 @@ def _detectPing(): log.info("Mac OS X detected; adjusting ping args.") _PING_ARG_TEMPLATE = ( "%(ping)s -n -s %(datalength)d -c 1 " - "-m %(ttl)d -t %(timeout)f %(ip)s" + "-m %(ttl)d -t %(timeout)d %(ip)s" ) elif system != "Linux": log.info( @@ -150,7 +150,7 @@ def _pingIp(self): ip=self.config.ip, version=self.config.ipVersion, ttl=64, - timeout=float(self._preferences.pingTimeOut), + timeout=int(self._preferences.pingTimeOut), datalength=self._daemon.options.dataLength if self._daemon.options.dataLength > 16 else 16, diff --git a/Products/ZenStatus/zenping.py b/Products/ZenStatus/zenping.py index 87461c0971..bcb07f11b5 100644 --- a/Products/ZenStatus/zenping.py +++ b/Products/ZenStatus/zenping.py @@ -45,8 +45,8 @@ class PingDaemon(CollectorDaemon): - def runPostConfigTasks(self, result=None): - CollectorDaemon.runPostConfigTasks(self, result=result) + def runPostConfigTasks(self): + super(PingDaemon, self).runPostConfigTasks() 
self.preferences.runPostConfigTasks() diff --git a/Products/ZenTestCase/BaseTestCase.py b/Products/ZenTestCase/BaseTestCase.py index bf43eb6317..450950d892 100644 --- a/Products/ZenTestCase/BaseTestCase.py +++ b/Products/ZenTestCase/BaseTestCase.py @@ -1,51 +1,47 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - import logging import zope.component -import zenoss.modelindex.api -from zope.traversing.adapters import DefaultTraversable -from transaction._transaction import Transaction from Testing import ZopeTestCase from Testing.ZopeTestCase.layer import ZopeLite - +from transaction._transaction import Transaction +from zenoss.modelindex.model_index import SearchParams from Zope2.App import zcml +from zope.testing.cleanup import cleanUp +from zope.traversing.adapters import DefaultTraversable -from Products.ZenModel.DmdBuilder import DmdBuilder -from Products.ZenModel.ZentinelPortal import PortalGenerator from Products.ZenEvents.EventManagerBase import EventManagerBase +from Products.ZenEvents.MySqlEventManager import log from Products.ZenEvents.MySqlSendEvent import MySqlSendEventMixin +from Products.ZenModel.DmdBuilder import DmdBuilder +from Products.ZenModel.ZentinelPortal import PortalGenerator from Products.ZenRelations.ZenPropertyManager import setDescriptors -from Products.ZenEvents.MySqlEventManager import log from Products.ZenUtils.Utils import unused, load_config_override -from zope.testing.cleanup import cleanUp - from Products.Zuul.catalog.model_catalog import get_solr_config -from zenoss.modelindex.model_index import SearchParams log.warn = lambda *args, **kwds: None # setup the Products needed for the Zenoss test instance -ZopeTestCase.installProduct('ZenModel', 1) -ZopeTestCase.installProduct('ZCatalog', 1) -ZopeTestCase.installProduct('OFolder', 1) -ZopeTestCase.installProduct('ManagableIndex', 1) -ZopeTestCase.installProduct('AdvancedQuery', 1) -ZopeTestCase.installProduct('ZCTextIndex', 1) -ZopeTestCase.installProduct('CMFCore', 1) -ZopeTestCase.installProduct('CMFDefault', 1) -ZopeTestCase.installProduct('MailHost', 1) -ZopeTestCase.installProduct('Transience', 1) -ZopeTestCase.installProduct('ZenRelations', 1) +ZopeTestCase.installProduct("ZenModel", 1) +ZopeTestCase.installProduct("ZCatalog", 1) +ZopeTestCase.installProduct("OFolder", 1) +ZopeTestCase.installProduct("ManagableIndex", 1) +ZopeTestCase.installProduct("AdvancedQuery", 1) +ZopeTestCase.installProduct("ZCTextIndex", 1) +ZopeTestCase.installProduct("CMFCore", 1) +ZopeTestCase.installProduct("CMFDefault", 1) +ZopeTestCase.installProduct("MailHost", 1) +ZopeTestCase.installProduct("Transience", 1) +ZopeTestCase.installProduct("ZenRelations", 1) def manage_addDummyManager(context, id): @@ -56,34 +52,63 @@ def manage_addDummyManager(context, id): class DummyCursor(object): - def __init__(self, *args, **kwds): pass - def execute(self, *args, **kwds): pass + def __init__(self, *args, **kwds): + pass + + def execute(self, *args, **kwds): + pass class DummyConnection(object): - def __init__(self, *args, **kwds): pass + def __init__(self, *args, **kwds): + pass + def cursor(self): return DummyCursor() - def close(self): pass + + def close(self): + pass class DummyManager(MySqlSendEventMixin, 
EventManagerBase): - __pychecker__ = 'no-override' def __init__(self, *args, **kwds): EventManagerBase.__init__(self, *args, **kwds) + def connect(self, *args, **kwds): unused(args, kwds) return DummyConnection() - def sendEvent(self, *args, **kwds): unused(args, kwds) - def sendEvents(self, *args, **kwds): unused(args, kwds) - def doSendEvent(self, *args, **kwds): unused(args, kwds) - def getEventSummary(self, *args, **kwds): unused(args, kwds) - def getEventDetail(self, *args, **kwds): unused(args, kwds) - def getDeviceIssues(self, *args, **kwds): unused(args, kwds) - def getHeartbeat(self, *args, **kwds): unused(args, kwds) - def getEventList(self, *args, **kwds): unused(args, kwds); return [] - def applyEventContext(self, evt): return evt - def applyDeviceContext(self, dev, evt): unused(dev); return evt + + def sendEvent(self, *args, **kwds): + unused(args, kwds) + + def sendEvents(self, *args, **kwds): + unused(args, kwds) + + def doSendEvent(self, *args, **kwds): + unused(args, kwds) + + def getEventSummary(self, *args, **kwds): + unused(args, kwds) + + def getEventDetail(self, *args, **kwds): + unused(args, kwds) + + def getDeviceIssues(self, *args, **kwds): + unused(args, kwds) + + def getHeartbeat(self, *args, **kwds): + unused(args, kwds) + + def getEventList(self, *args, **kwds): + unused(args, kwds) + return [] + + def applyEventContext(self, evt): + return evt + + def applyDeviceContext(self, dev, evt): + unused(dev) + return evt def reset_model_catalog(): @@ -91,13 +116,22 @@ def reset_model_catalog(): Deletes temporary documents from previous tests. They should be cleaned by abort() but just in case """ - model_index = zope.component.createObject('ModelIndex', get_solr_config(test=True)) + model_index = zope.component.createObject( + "ModelIndex", get_solr_config(test=True) + ) model_index.unindex_search(SearchParams(query="NOT tx_state:0")) def init_model_catalog_for_tests(): - from Products.Zuul.catalog.model_catalog import register_model_catalog, register_data_manager_factory - from zenoss.modelindex.api import _register_factories, reregister_subscriptions + from Products.Zuul.catalog.model_catalog import ( + register_model_catalog, + register_data_manager_factory, + ) + from zenoss.modelindex.api import ( + _register_factories, + reregister_subscriptions, + ) + _register_factories() register_model_catalog(test=True) register_data_manager_factory(test=True) @@ -106,31 +140,36 @@ def init_model_catalog_for_tests(): class ZenossTestCaseLayer(ZopeLite): - @classmethod def testSetUp(cls): import Products zope.component.testing.setUp(cls) zope.component.provideAdapter(DefaultTraversable, (None,)) - zcml.load_config('testing.zcml', Products.ZenTestCase) + zcml.load_config("testing.zcml", Products.ZenTestCase) import Products.ZenMessaging.queuemessaging - load_config_override('nopublisher.zcml', Products.ZenMessaging.queuemessaging) + + load_config_override( + "nopublisher.zcml", Products.ZenMessaging.queuemessaging + ) # Have to force registering these as they are torn down between tests from zenoss.protocols.adapters import registerAdapters + registerAdapters() # Register Model Catalog related stuff init_model_catalog_for_tests() from twisted.python.runtime import platform + platform.supportsThreads_orig = platform.supportsThreads - platform.supportsThreads = lambda : None + platform.supportsThreads = lambda: None @classmethod def testTearDown(cls): from twisted.python.runtime import platform + platform.supportsThreads = platform.supportsThreads_orig cleanUp() @@ -148,32 
+187,42 @@ def afterSetUp(self): logging.disable(logging.CRITICAL) gen = PortalGenerator() - if hasattr( self.app, 'zport' ): - self.app._delObject( 'zport', suppress_events=True) + if hasattr(self.app, "zport"): + self.app._delObject("zport", suppress_events=True) - gen.create(self.app, 'zport', True) + gen.create(self.app, "zport", True) # builder params: # portal, cvthost, evtuser, evtpass, evtdb, # smtphost, smtpport, pagecommand - builder = DmdBuilder(self.app.zport, 'localhost', 'zenoss', 'zenoss', - 'events', 3306, 'localhost', '25', '') + builder = DmdBuilder( + self.app.zport, + "localhost", + "zenoss", + "zenoss", + "events", + 3306, + "localhost", + "25", + "", + ) builder.build() self.dmd = builder.dmd - self.dmd.ZenUsers.manage_addUser('tester', roles=('Manager',)) - user = self.app.zport.acl_users.getUserById('tester') + self.dmd.ZenUsers.manage_addUser("tester", roles=("Manager",)) + user = self.app.zport.acl_users.getUserById("tester") from AccessControl.SecurityManagement import newSecurityManager + newSecurityManager(None, user) # Let's hide transaction.commit() so that tests don't fubar # each other self._transaction_commit = Transaction.commit - Transaction.commit=lambda *x: None + Transaction.commit = lambda *x: None setDescriptors(self.dmd) def beforeTearDown(self): - if hasattr( self, '_transaction_commit' ): - Transaction.commit=self._transaction_commit + if hasattr(self, "_transaction_commit"): + Transaction.commit = self._transaction_commit self.app = None self.dmd = None diff --git a/Products/ZenTestCase/__init__.py b/Products/ZenTestCase/__init__.py index de5b4971fc..8f3a86088f 100644 --- a/Products/ZenTestCase/__init__.py +++ b/Products/ZenTestCase/__init__.py @@ -1,11 +1,8 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - - - diff --git a/Products/ZenUI3/browser/__init__.py b/Products/ZenUI3/browser/__init__.py index cb5ae18de3..81ad777c10 100644 --- a/Products/ZenUI3/browser/__init__.py +++ b/Products/ZenUI3/browser/__init__.py @@ -1,52 +1,54 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -from AccessControl import getSecurityManager import sys + +from AccessControl import getSecurityManager, Unauthorized from Products.Five.browser import BrowserView, pagetemplatefile -from zope.viewlet.interfaces import IViewletManager from zope.component import queryMultiAdapter -from AccessControl import Unauthorized +from zope.viewlet.interfaces import IViewletManager + class MainPageRedirect(BrowserView): def __call__(self): - self.request.response.redirect('/zport/dmd/dashboard') + self.request.response.redirect("/zport/dmd/dashboard") class ErrorMessage(BrowserView): - def _get_viewlets(self, provider, role="_ZenCommon_Permission"): """ In the case of a NotFound, there is no authenticated user available, as - a last resort check session if we have user logged in and if not raise Unauthorize. 
+ a last resort check session if we have user logged in and if not + raise Unauthorize. + In the case of another exception type, we have the user, but security isn't entirely set up correctly, so we have to make this view appear to be in the context (it actually has none, not being an Acquisition.Implicit). - The upshot of all this is that for exceptions, nav will appear only for authenticated - user. + The upshot of all this is that for exceptions, nav will appear only + for authenticated user. """ # Check to see if we're authenticated userid = getSecurityManager().getUser().getId() if userid is None: # Not authenticated, check session if we have user logged in - if not self.request.SESSION.get('__ac_logged_as'): + if not self.request.SESSION.get("__ac_logged_as"): raise Unauthorized - self.__ac_local_roles__ = {userid:[role]} + self.__ac_local_roles__ = {userid: [role]} else: # Authenticated, force this view to be in a working context self._parent = self.dmd # Look up the viewlets - mgr = queryMultiAdapter((self.dmd, self.request, self), - IViewletManager, provider) + mgr = queryMultiAdapter( + (self.dmd, self.request, self), IViewletManager, provider + ) # Activate the viewlets mgr.update() if userid is None: @@ -54,8 +56,8 @@ def _get_viewlets(self, provider, role="_ZenCommon_Permission"): # so the manual permission checking in # ZenUI3.navigation.menuitem.PrimaryNavigationMenuItem.render will # cause nothing to be returned. Short-circuit permissions, since - # dmd doesn't have them set anyway. Since we're already using the most - # restrictive authenticated role there is, this won't cause + # dmd doesn't have them set anyway. Since we're already using the + # most restrictive authenticated role there is, this won't cause # anything to display that shouldn't. 
for viewlet in mgr.viewlets: viewlet.__ac_permissions__ = None @@ -63,19 +65,19 @@ def _get_viewlets(self, provider, role="_ZenCommon_Permission"): @property def headExtra(self): - return self._get_viewlets('head-extra') + return self._get_viewlets("head-extra") @property def primaryNav(self): - return self._get_viewlets('primarynav') + return self._get_viewlets("primarynav") @property def secondaryNav(self): - return self._get_viewlets('secondarynav') + return self._get_viewlets("secondarynav") @property def dmd(self): - return self.request.other['PARENTS'][-1].zport.dmd + return self.request.other["PARENTS"][-1].zport.dmd @property def instanceIdentifier(self): @@ -87,18 +89,15 @@ def zenossVersion(self): @property def isNotFound(self): - return self.context.__class__.__name__=='NotFound' + return self.context.__class__.__name__ == "NotFound" @property def error_message(self): t, v, tb = sys.exc_info() return self.dmd.zenoss_error_message( - error_type=t, - error_value=v, - error_traceback=tb, - error_message=v + error_type=t, error_value=v, error_traceback=tb, error_message=v ) def __call__(self): - t = pagetemplatefile.ViewPageTemplateFile('error_message.pt') + t = pagetemplatefile.ViewPageTemplateFile("error_message.pt") return t(self) diff --git a/Products/ZenUI3/browser/backcompat.py b/Products/ZenUI3/browser/backcompat.py index a9a298cc42..dcb3ca66ad 100644 --- a/Products/ZenUI3/browser/backcompat.py +++ b/Products/ZenUI3/browser/backcompat.py @@ -10,96 +10,120 @@ def getImmediateView(ob): if hasattr(ob, "factory_type_information"): - return ob.factory_type_information[0]['immediate_view'] + return ob.factory_type_information[0]["immediate_view"] else: - raise NameError('Cannot find default view for "%s"' % - '/'.join(ob.getPhysicalPath())) + raise NameError( + 'Cannot find default view for "%s"' + % "/".join(ob.getPhysicalPath()) + ) def immediate_view(ob): view = getImmediateView(ob) path = ob.getPhysicalPath() + (view,) - return '/'.join(path) + return "/".join(path) + def Device(ob): - id = '/'.join(ob.getPhysicalPath()) - REQUEST = getattr(ob, 'REQUEST', {}) - return id + '/devicedetail#' + REQUEST.get('fragment', 'deviceDetailNav:device_overview') + id = "/".join(ob.getPhysicalPath()) + REQUEST = getattr(ob, "REQUEST", {}) + return ( + id + + "/devicedetail#" + + REQUEST.get("fragment", "deviceDetailNav:device_overview") + ) + def EventClass(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/Events/eventclasses#classes:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/Events/eventclasses#classes:" + id + def DeviceClass(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/itinfrastructure#devices:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/itinfrastructure#devices:" + id + def Manufacturer(ob): - return 'zport/dmd/manufacturers#manufacturers_tree:.zport.dmd.Manufacturers.' + ob.id + return ( + "zport/dmd/manufacturers#manufacturers_tree:.zport.dmd.Manufacturers." + + ob.id + ) + def ProductClass(ob): mId = ob.manufacturer().id - id = '/'.join(ob.getPhysicalPath()) - return 'zport/dmd/manufacturers#manufacturers_tree:.zport.dmd.Manufacturers.' + mId + ':' + id + id = "/".join(ob.getPhysicalPath()) + return ( + "zport/dmd/manufacturers#manufacturers_tree:.zport.dmd.Manufacturers." 
+ + mId + + ":" + + id + ) def Location(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/itinfrastructure#locs:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/itinfrastructure#locs:" + id def System(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/itinfrastructure#systemsTree:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/itinfrastructure#systemsTree:" + id def DeviceGroup(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/itinfrastructure#groups:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/itinfrastructure#groups:" + id def IpNetwork(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/networks#networks:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/networks#networks:" + id def DeviceComponent(ob): devpath = ob.device().getPrimaryUrlPath() - return ':'.join([devpath+'/devicedetail#deviceDetailNav', ob.meta_type, - ob.getPrimaryUrlPath()]) + return ":".join( + [ + devpath + "/devicedetail#deviceDetailNav", + ob.meta_type, + ob.getPrimaryUrlPath(), + ] + ) def Process(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/process#processTree:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/process#processTree:" + id def Service(ob): - id = '.'.join(ob.getPhysicalPath()) - if id.startswith('.zport.dmd.Services.WinService'): - return '/zport/dmd/winservice#navTree:' + id - return '/zport/dmd/ipservice#navTree:' + id + id = ".".join(ob.getPhysicalPath()) + if id.startswith(".zport.dmd.Services.WinService"): + return "/zport/dmd/winservice#navTree:" + id + return "/zport/dmd/ipservice#navTree:" + id def MonitoringTemplate(ob): - ''' + """ Templates for devices are in the new Monitoring Templates screen. Collector templates however, are still edited in the old style. - ''' - id = '/'.join(ob.getPhysicalPath()) - if id.startswith('/zport/dmd/Devices'): - return '/zport/dmd/template#templateTree:' + id + """ + id = "/".join(ob.getPhysicalPath()) + if id.startswith("/zport/dmd/Devices"): + return "/zport/dmd/template#templateTree:" + id view = getImmediateView(ob) - return '%s/%s' % (id, view) + return "%s/%s" % (id, view) def ReportClass(ob): - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/reports#reporttree:' + id + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/reports#reporttree:" + id def CustomReport(ob): - ''' + """ The reportmail utility needs to get at what is the content of the backcompat iframe on the reports screen, and existing reportmail setups exist that are sending out reports using the old urls with paths @@ -108,25 +132,36 @@ def CustomReport(ob): On the other hand, those same old model based urls exist in some places in the app (ZenPack provides table for instance) and need to take the user into the new reports screen. - ''' - if ob.REQUEST['QUERY_STRING'].find('adapt=false') != -1 or \ - ob.REQUEST['HTTP_REFERER'].find('/view' + ob.meta_type) != -1 : + """ + if ( + ob.REQUEST["QUERY_STRING"].find("adapt=false") != -1 + or ob.REQUEST["HTTP_REFERER"].find("/view" + ob.meta_type) != -1 + ): params = [] - for key in ob.REQUEST.form.keys() : - params.append('%s=%s' % (key, ob.REQUEST.form[key])) - return ob.absolute_url_path() + '/view' + ob.meta_type + \ - ('?' 
+ '&'.join(params)) if params else '' - id = '.'.join(ob.getPhysicalPath()) - return '/zport/dmd/reports#reporttree:' + id + for key in ob.REQUEST.form.keys(): + params.append("%s=%s" % (key, ob.REQUEST.form[key])) + return ( + ob.absolute_url_path() + + "/view" + + ob.meta_type + + ("?" + "&".join(params)) + if params + else "" + ) + id = ".".join(ob.getPhysicalPath()) + return "/zport/dmd/reports#reporttree:" + id + def MibNode(ob): - id = '/'.join(ob.getPhysicalPath()).split('/nodes/')[0] - return '/zport/dmd/mibs#mibtree:' + id + id = "/".join(ob.getPhysicalPath()).split("/nodes/")[0] + return "/zport/dmd/mibs#mibtree:" + id + def MibNotification(ob): - id = '/'.join(ob.getPhysicalPath()).split('/notifications/')[0] - return '/zport/dmd/mibs#mibtree:' + id + id = "/".join(ob.getPhysicalPath()).split("/notifications/")[0] + return "/zport/dmd/mibs#mibtree:" + id + def MibClass(ob): - id = '/'.join(ob.getPhysicalPath()) - return '/zport/dmd/mibs#mibtree:' + id + id = "/".join(ob.getPhysicalPath()) + return "/zport/dmd/mibs#mibtree:" + id diff --git a/Products/ZenUI3/browser/command.py b/Products/ZenUI3/browser/command.py index 82a65e028e..7ec5c5f228 100644 --- a/Products/ZenUI3/browser/command.py +++ b/Products/ZenUI3/browser/command.py @@ -1,28 +1,28 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -import os import shlex -import sys -import traceback import subprocess -import signal +import sys import time +import traceback + from itertools import imap + from Products.ZenMessaging.audit import audit -from Products.ZenUI3.security.security import permissionsForContext from Products.ZenModel.ZenossSecurity import ZEN_RUN_COMMANDS -from Products.ZenUI3.browser.streaming import StreamingView, StreamClosed +from Products.ZenUI3.security.security import permissionsForContext from Products.ZenUtils.jsonutils import unjson from Products.Zuul import getFacade +from .streaming import StreamingView, StreamClosed + class CommandView(StreamingView): """ @@ -31,16 +31,22 @@ class CommandView(StreamingView): Designed to work in concert with the Ext component Zenoss.CommandWindow. 
""" + def stream(self): - data = unjson(self.request.get('data')) - command = self.context.getUserCommands(asDict=True).get(data['command'], None) + data = unjson(self.request.get("data")) + command = self.context.getUserCommands(asDict=True).get( + data["command"], None + ) if command: - for uid in data['uids']: + for uid in data["uids"]: target = self.context.unrestrictedTraverse(uid) if permissionsForContext(target)[ZEN_RUN_COMMANDS.lower()]: self.execute(command, target) else: - self.write('==== No permissions to run command %s for %s, skipping ===='.format(command, target)) + self.write( + "==== No permissions to run command %s for %s, " + "skipping ====" % (command, target) + ) def _get_printable_command(self, raw_command, compiled_command, target): """ @@ -49,8 +55,10 @@ def _get_printable_command(self, raw_command, compiled_command, target): printable_command = compiled_command zProps = [] - if hasattr(target, 'zenPropertyIds'): - zProps = [zp for zp in target.zenPropertyIds() if zp in raw_command] + if hasattr(target, "zenPropertyIds"): + zProps = [ + zp for zp in target.zenPropertyIds() if zp in raw_command + ] if len(zProps) > 0: raw_items = raw_command.split() @@ -62,7 +70,7 @@ def _get_printable_command(self, raw_command, compiled_command, target): if any(p in raw_item for p in zProps): item = raw_item printable_items.append(item) - printable_command = ' '.join(printable_items) + printable_command = " ".join(printable_items) else: # We could not filter the zprops so we return the raw command printable_command = raw_command @@ -73,18 +81,25 @@ def execute(self, cmd, target): try: compiled = str(self.context.compile(cmd, target)) - timeout = getattr(target, 'zCommandUserCommandTimeout', - self.context.defaultTimeout) + timeout = getattr( + target, + "zCommandUserCommandTimeout", + self.context.defaultTimeout, + ) end = time.time() + timeout - self.write('==== %s ====' % target.titleOrId()) - printable_command = self._get_printable_command(cmd.command, compiled, target) + self.write("==== %s ====" % target.titleOrId()) + printable_command = self._get_printable_command( + cmd.command, compiled, target + ) self.write(printable_command) - audit('UI.Command.Invoke', cmd.id, target=target.id) - p = subprocess.Popen(shlex.split(compiled), - bufsize=1, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) + audit("UI.Command.Invoke", cmd.id, target=target.id) + p = subprocess.Popen( + shlex.split(compiled), + bufsize=1, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) retcode = None while time.time() < end or retcode is not None: line = p.stdout.readline() @@ -103,19 +118,19 @@ def execute(self, cmd, target): break else: p.kill() - self.write('Command timed out for %s (timeout is %s seconds)'%( - target.titleOrId(), timeout) - ) + self.write( + "Command timed out for %s (timeout is %s seconds)" + % (target.titleOrId(), timeout) + ) except Exception: - self.write('Exception while performing command for %s' % - target.id) - self.write('Type: %s Value: %s' % tuple(sys.exc_info()[:2])) + self.write("Exception while performing command for %s" % target.id) + self.write("Type: %s Value: %s" % tuple(sys.exc_info()[:2])) class BackupView(StreamingView): def stream(self): - data = unjson(self.request.get('data')) - args = data['args'] + data = unjson(self.request.get("data")) + args = data["args"] includeEvents = args[0] includeMysqlLogin = args[1] timeoutString = args[2] @@ -123,8 +138,9 @@ def stream(self): timeout = int(timeoutString) except ValueError: timeout = 120 - 
self.context.zport.dmd.manage_createBackup(includeEvents, - includeMysqlLogin, timeout, self.request, self.write) + self.context.zport.dmd.manage_createBackup( + includeEvents, includeMysqlLogin, timeout, self.request, self.write + ) class MonitorDatasource(StreamingView): @@ -139,23 +155,22 @@ def stream(self): """ try: request = self.request - data = unjson(request.form['data']) + data = unjson(request.form["data"]) # datasource expect the request object, so set the attributes - # from the request (so the user can test without saving the datasource) + # from the request (so the user can test without saving the + # datasource). for key in data: request[key] = data[key] self.write("Preparing Command...") - request['renderTemplate'] = False + request["renderTemplate"] = False results = self.context.testDataSourceAgainstDevice( - data.get('testDevice'), - request, - self.write, - self.reportError) + data.get("testDevice"), request, self.write, self.reportError + ) return results except Exception: - self.write('Exception while performing command:
<br />') - self.write('<pre>%s</pre>' % (traceback.format_exc())) + self.write("Exception while performing command: <br />") + self.write("<pre>%s</pre>
" % (traceback.format_exc())) def reportError(self, title, body, priority=None, image=None): """ @@ -170,10 +185,11 @@ class ModelView(StreamingView): """ Accepts a list of uids to model. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for device in imap(facade._getObject, uids): device.collectDevice(REQUEST=self.request, write=self.write) @@ -182,49 +198,56 @@ class ModelDebugView(StreamingView): """ Accepts a list of uids to model. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for device in imap(facade._getObject, uids): - device.collectDevice(REQUEST=self.request, write=self.write, - debug=True) + device.collectDevice( + REQUEST=self.request, write=self.write, debug=True + ) class GroupModelView(StreamingView): """ Accepts a list of organizer uids to model devices they contain. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for deviceOrganizer in imap(facade._getObject, uids): - deviceOrganizer.collectDevice(REQUEST=self.request, - write=self.write) + deviceOrganizer.collectDevice( + REQUEST=self.request, write=self.write + ) class GroupModelDebugView(StreamingView): """ Accepts a list of organizer uids to model devices they contain. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for deviceOrganizer in imap(facade._getObject, uids): - deviceOrganizer.collectDevice(REQUEST=self.request, - write=self.write, debug=True) + deviceOrganizer.collectDevice( + REQUEST=self.request, write=self.write, debug=True + ) class MonitorView(StreamingView): """ Accepts a list of uids to monitor. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for device in imap(facade._getObject, uids): device.runDeviceMonitor(REQUEST=self.request, write=self.write) @@ -233,35 +256,42 @@ class MonitorDebugView(StreamingView): """ Accepts a list of uids to monitor. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for device in imap(facade._getObject, uids): - device.runDeviceMonitor(REQUEST=self.request, write=self.write, debug=True) + device.runDeviceMonitor( + REQUEST=self.request, write=self.write, debug=True + ) class GroupMonitorView(StreamingView): """ Accepts a list of organizer uids to monitor devices they contain. 
""" + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for deviceOrganizer in imap(facade._getObject, uids): - deviceOrganizer.runDeviceMonitor(REQUEST=self.request, - write=self.write) + deviceOrganizer.runDeviceMonitor( + REQUEST=self.request, write=self.write + ) class GroupMonitorDebugView(StreamingView): """ Accepts a list of organizer uids to monitor devices they contain. """ + def stream(self): - data = unjson(self.request.get('data')) - uids = data['uids'] - facade = getFacade('device', self.context) + data = unjson(self.request.get("data")) + uids = data["uids"] + facade = getFacade("device", self.context) for deviceOrganizer in imap(facade._getObject, uids): - deviceOrganizer.runDeviceMonitor(REQUEST=self.request, - write=self.write, debug=True) + deviceOrganizer.runDeviceMonitor( + REQUEST=self.request, write=self.write, debug=True + ) diff --git a/Products/ZenUI3/browser/device_export.py b/Products/ZenUI3/browser/device_export.py index 95cf9d7870..dbc7695ce6 100644 --- a/Products/ZenUI3/browser/device_export.py +++ b/Products/ZenUI3/browser/device_export.py @@ -8,28 +8,34 @@ ############################################################################## import logging -from Products import Zuul + +from xml.etree.ElementTree import Element, tostring + from Products.Five.browser import BrowserView + +from Products import Zuul +from Products.ZenEvents.EventClass import EventClass from Products.ZenUtils.jsonutils import unjson from Products.Zuul.facades.devicefacade import DeviceFacade -from xml.etree.ElementTree import Element, tostring -from Products.ZenEvents.EventClass import EventClass -log = logging.getLogger('zen.deviceexporter') +log = logging.getLogger("zen.deviceexporter") def event(values): """ - :param values: dictionary with total and acknowledged count for each event's severity + :param values: dictionary with total and acknowledged count for each + event's severity. 
:return: total count of events with higher severity """ # Get event's types with non-zero value of "count" field - values = {x:y.get('count') for x,y in values.iteritems() if y.get('count')} + values = { + x: y.get("count") for x, y in values.iteritems() if y.get("count") + } if not values: return "0" # Swap severitie's keys and values - severities = {x.lower():y for y,x in EventClass.severities.items()} + severities = {x.lower(): y for y, x in EventClass.severities.items()} # Get name of highly important event kind in values m = max(values, key=severities.__getitem__) # Report it @@ -37,65 +43,77 @@ def event(values): class DeviceExporter(BrowserView): - def __call__(self): - body = unjson(self.request.form['body']) - type = body.get('type') - getattr(self, type)(self.request.response, self._query(body), body['fields']) + body = unjson(self.request.form["body"]) + type = body.get("type") + getattr(self, type)( + self.request.response, self._query(body), body["fields"] + ) def _query(self, params): device_router = DeviceFacade(self.context) - devices = device_router.getDevices(uid=params['uid'], - sort=params['sort'], - dir=params['sdir'], - params=params['params'], - limit=None) - return Zuul.marshal(devices.results, params['fields']) + devices = device_router.getDevices( + uid=params["uid"], + sort=params["sort"], + dir=params["sdir"], + params=params["params"], + limit=None, + ) + return Zuul.marshal(devices.results, params["fields"]) @staticmethod def xml(response, devices, fields): - response.setHeader('Content-Type', 'text/xml; charset=utf-8') - response.setHeader('Content-Disposition', 'attachment; filename=devices.xml') + response.setHeader("Content-Type", "text/xml; charset=utf-8") + response.setHeader( + "Content-Disposition", "attachment; filename=devices.xml" + ) response.write('\n') - response.write('\n') + response.write("\n") for device in devices: - xml_device = Element('ZenossDevice') + xml_device = Element("ZenossDevice") for field in fields: device_field = Element(field) - value = device.get(field, '') - if field == 'events': + value = device.get(field, "") + if field == "events": device_field.text = event(value) - elif field in ['systems', 'groups']: + elif field in ["systems", "groups"]: for s in value: sub_field = Element(field[:-1]) - sub_field.text = str(s.get('name')) + sub_field.text = str(s.get("name")) device_field.append(sub_field) elif isinstance(value, dict): - value = value.get('name', '') + value = value.get("name", "") device_field.text = str(value) else: device_field.text = str(value) xml_device.append(device_field) response.write(tostring(xml_device)) response.write("") - response.write('\n') + response.write("\n") @staticmethod def csv(response, devices, fields): - response.setHeader('Content-Type', 'application/vns.ms-excel') - response.setHeader('Content-Disposition', 'attachment; filename=devices.csv') + response.setHeader("Content-Type", "application/vns.ms-excel") + response.setHeader( + "Content-Disposition", "attachment; filename=devices.csv" + ) from csv import writer + writer = writer(response) writer.writerow(fields) for device in devices: data = [] for field in fields: - value = device.get(field, '') + value = device.get(field, "") if isinstance(value, list): - value = "|".join([v.get('name') for v in value]) + value = "|".join([v.get("name") for v in value]) if isinstance(value, dict): - value = event(value) if field == 'events' else value.get('name') - if not (value or value is 0): - value = '' + value = ( + event(value) + if field == 
"events" + else value.get("name") + ) + if not (value or value == 0): + value = "" data.append(str(value).strip()) writer.writerow(data) diff --git a/Products/ZenUI3/browser/eventconsole/columns.py b/Products/ZenUI3/browser/eventconsole/columns.py index edaea0cb24..99e06df097 100644 --- a/Products/ZenUI3/browser/eventconsole/columns.py +++ b/Products/ZenUI3/browser/eventconsole/columns.py @@ -7,7 +7,6 @@ # ############################################################################## - """ This module describes the parameters for columns that may appear in the event console. This is used both to generate the JavaScript defining the columns and @@ -15,25 +14,24 @@ """ import copy + from zenoss.protocols.protobufs.zep_pb2 import ( - STATUS_NEW, + SEVERITY_CLEAR, + SEVERITY_CRITICAL, + SEVERITY_DEBUG, + SEVERITY_ERROR, + SEVERITY_INFO, + SEVERITY_WARNING, STATUS_ACKNOWLEDGED, STATUS_AGED, STATUS_CLEARED, STATUS_CLOSED, + STATUS_NEW, STATUS_SUPPRESSED, - SEVERITY_CRITICAL, - SEVERITY_CLEAR, - SEVERITY_DEBUG, - SEVERITY_ERROR, - SEVERITY_INFO, - SEVERITY_WARNING ) - -__doc__=""" - +__doc__ = """ The COLUMN_CONFIG dictionary contains the full definitions for all of the columns that show up in grids. The following definition covers all of the 'base' Zenoss properties. This dictionary is augmented later to @@ -64,381 +62,302 @@ field_definition= "{name:'stateChange', type:'date', dateFormat: Zenoss.date.ISO8601Long}" -""" -COLUMN_CONFIG = { - - 'evid' : dict( - header='Event ID', - filter='textfield', - sortable=True), +""" # noqa E501 - 'dedupid' : dict( - header='Fingerprint', - filter='textfield', +COLUMN_CONFIG = { + "evid": dict(header="Event ID", filter="textfield", sortable=True), + "dedupid": dict( + header="Fingerprint", + filter="textfield", sortable=True, - ), - - 'eventState': dict( - header='Status', + ), + "eventState": dict( + header="Status", width=60, sortable=True, filter={ - 'xtype':'multiselectmenu', - 'text':'...', - 'cls': 'x-btn x-btn-default-toolbar-small', - 'source':[{ - 'value':STATUS_NEW, - 'name':'New' - }, - { - 'value':STATUS_ACKNOWLEDGED, - 'name':'Acknowledged' - }, - { - 'value':STATUS_SUPPRESSED, - 'name':'Suppressed', - 'checked':False - }, - { - 'value':STATUS_CLOSED, - 'name':'Closed', - 'checked':False - }, - { - 'value':STATUS_CLEARED, - 'name':'Cleared', - 'checked':False - }, - { - 'value':STATUS_AGED, - 'name':'Aged', - 'checked':False - }] + "xtype": "multiselectmenu", + "text": "...", + "cls": "x-btn x-btn-default-toolbar-small", + "source": [ + {"value": STATUS_NEW, "name": "New"}, + {"value": STATUS_ACKNOWLEDGED, "name": "Acknowledged"}, + { + "value": STATUS_SUPPRESSED, + "name": "Suppressed", + "checked": False, + }, + {"value": STATUS_CLOSED, "name": "Closed", "checked": False}, + {"value": STATUS_CLEARED, "name": "Cleared", "checked": False}, + {"value": STATUS_AGED, "name": "Aged", "checked": False}, + ], }, - renderer='Zenoss.util.render_status'), - - 'severity' : dict( - header='Severity', + renderer="Zenoss.util.render_status", + ), + "severity": dict( + header="Severity", width=60, sortable=True, filter={ - 'xtype':'multiselectmenu', - 'text':'...', - 'cls': 'x-btn x-btn-default-toolbar-small', - 'source': [{ - 'value': SEVERITY_CRITICAL, - 'name': 'Critical' - },{ - 'value': SEVERITY_ERROR, - 'name': 'Error' - },{ - 'value': SEVERITY_WARNING, - 'name': 'Warning' - },{ - 'value': SEVERITY_INFO, - 'name':'Info' - },{ - 'value': SEVERITY_DEBUG, - 'name':'Debug', - 'checked':False - },{ - 'value': SEVERITY_CLEAR, - 'name':'Clear', - 
'checked':False - }] + "xtype": "multiselectmenu", + "text": "...", + "cls": "x-btn x-btn-default-toolbar-small", + "source": [ + {"value": SEVERITY_CRITICAL, "name": "Critical"}, + {"value": SEVERITY_ERROR, "name": "Error"}, + {"value": SEVERITY_WARNING, "name": "Warning"}, + {"value": SEVERITY_INFO, "name": "Info"}, + {"value": SEVERITY_DEBUG, "name": "Debug", "checked": False}, + {"value": SEVERITY_CLEAR, "name": "Clear", "checked": False}, + ], }, - renderer='Zenoss.util.render_severity', - field_definition = "{name:'severity',type:'int'}" + renderer="Zenoss.util.render_severity", + field_definition="{name:'severity',type:'int'}", ), - - 'device' : dict( - header='Resource', - filter='textfield', - renderer='Zenoss.render.linkFromGrid', - sortable=True), - - 'component' : dict( - header='Component', - filter='textfield', - renderer='Zenoss.render.linkFromGrid', + "device": dict( + header="Resource", + filter="textfield", + renderer="Zenoss.render.linkFromGrid", + sortable=True, + ), + "component": dict( + header="Component", + filter="textfield", + renderer="Zenoss.render.linkFromGrid", width=80, - sortable=True), - - 'eventClass': dict( - header='Event Class', - filter={ - 'xtype':'eventclass', - 'forceSelection': False - }, + sortable=True, + ), + "eventClass": dict( + header="Event Class", + filter={"xtype": "eventclass", "forceSelection": False}, width=80, - renderer='Zenoss.render.linkFromGrid', - sortable=True), - - 'summary' : dict( - header='Summary', - flex= 1, - filter='textfield', - renderer='Zenoss.render.eventSummaryRow', - sortable=True), - - 'firstTime' : dict( - header='First Seen', + renderer="Zenoss.render.linkFromGrid", + sortable=True, + ), + "summary": dict( + header="Summary", + flex=1, + filter="textfield", + renderer="Zenoss.render.eventSummaryRow", + sortable=True, + ), + "firstTime": dict( + header="First Seen", sortable=True, filter={ - 'xtype':'daterange', - 'format':'Y-m-d H:i:s', - 'invalidText' : "{0} is not a valid date - it must be in the format yyyy/mm/dd hh:mm:ss", - 'altFormats':'m/d/Y|n/j/Y|n/j/y|m/j/y|n/d/y|m/j/Y|n/d/Y|m-d-y|m-d-Y|m/d|m-d|md|mdy|mdY|d|Y-m-d|n-j|n/j|Y-m-d H:i:s \\T\\O Y-m-d H:i:s' + "xtype": "daterange", + "format": "Y-m-d H:i:s", + "invalidText": ( + "{0} is not a valid date - " + "it must be in the format yyyy/mm/dd hh:mm:ss" + ), + "altFormats": ( + "m/d/Y|n/j/Y|n/j/y|m/j/y|n/d/y|m/j/Y|n/d/Y|" + "m-d-y|m-d-Y|m/d|m-d|md|mdy|mdY|d|Y-m-d|" + "n-j|n/j|Y-m-d H:i:s \\T\\O Y-m-d H:i:s" + ), }, width=135, - renderer='Zenoss.date.renderDateColumn()' + renderer="Zenoss.date.renderDateColumn()", ), - - 'lastTime' : dict( - header='Last Seen', + "lastTime": dict( + header="Last Seen", sortable=True, filter={ - 'xtype':'daterange', - 'format':'Y-m-d H:i:s', - 'invalidText' : "{0} is not a valid date - it must be in the format yyyy/mm/dd hh:mm:ss", - 'altFormats':'m/d/Y|n/j/Y|n/j/y|m/j/y|n/d/y|m/j/Y|n/d/Y|m-d-y|m-d-Y|m/d|m-d|md|mdy|mdY|d|Y-m-d|n-j|n/j|Y-m-d H:i:s \\T\\O Y-m-d H:i:s' + "xtype": "daterange", + "format": "Y-m-d H:i:s", + "invalidText": ( + "{0} is not a valid date - " + "it must be in the format yyyy/mm/dd hh:mm:ss" + ), + "altFormats": ( + "m/d/Y|n/j/Y|n/j/y|m/j/y|n/d/y|m/j/Y|n/d/Y|" + "m-d-y|m-d-Y|m/d|m-d|md|mdy|mdY|d|Y-m-d|" + "n-j|n/j|Y-m-d H:i:s \\T\\O Y-m-d H:i:s" + ), }, width=135, - renderer='Zenoss.date.renderDateColumn()' + renderer="Zenoss.date.renderDateColumn()", ), - - 'count' : dict( - header='Count', + "count": dict( + header="Count", sortable=True, width=60, - align='right', - filter={ - 'xtype': 'textfield', - 
'vtype': 'numrange' - }, - field_definition = "{name:'count',type:'int'}" + align="right", + filter={"xtype": "textfield", "vtype": "numrange"}, + field_definition="{name:'count',type:'int'}", ), - - 'prodState' : dict( - header='Production State', + "prodState": dict( + header="Production State", sortable=True, - filter={ - 'xtype':'multiselect-prodstate' - }), - - 'DevicePriority': dict( - header='Device Priority', + filter={"xtype": "multiselect-prodstate"}, + ), + "DevicePriority": dict( + header="Device Priority", sortable=True, - filter={ - 'xtype':'multiselect-devicepriority' - }), - - 'stateChange': dict( - header='State Change', + filter={"xtype": "multiselect-devicepriority"}, + ), + "stateChange": dict( + header="State Change", sortable=True, filter={ - 'xtype':'daterange', - 'format':'Y-m-d H:i:s', - 'invalidText': "{0} is not a valid date - it must be in the format yyyy/mm/dd hh:mm:ss", - 'altFormats':'m/d/Y|n/j/Y|n/j/y|m/j/y|n/d/y|m/j/Y|n/d/Y|m-d-y|m-d-Y|m/d|m-d|md|mdy|mdY|d|Y-m-d|n-j|n/j|Y-m-d H:i:s \\T\\O Y-m-d H:i:s' + "xtype": "daterange", + "format": "Y-m-d H:i:s", + "invalidText": ( + "{0} is not a valid date - " + "it must be in the format yyyy/mm/dd hh:mm:ss" + ), + "altFormats": ( + "m/d/Y|n/j/Y|n/j/y|m/j/y|n/d/y|m/j/Y|n/d/Y|" + "m-d-y|m-d-Y|m/d|m-d|md|mdy|mdY|d|Y-m-d|" + "n-j|n/j|Y-m-d H:i:s \\T\\O Y-m-d H:i:s" + ), }, width=120, - renderer='Zenoss.date.renderDateColumn()' + renderer="Zenoss.date.renderDateColumn()", ), - - 'eventClassKey': dict( - header='Event Class Key', - filter='textfield', - sortable=True), - - 'eventGroup': dict( - header='Event Group', - filter='textfield', - sortable=True), - - 'eventKey' : dict( - header='Event Key', - filter='textfield', - sortable=True), - - 'agent' : dict( - header='Agent', - filter='textfield', - sortable=True), - - 'monitor': dict( - header='Collector', - filter='textfield', - sortable=True), - - 'ownerid': dict( - header='Owner', - filter='textfield', - sortable=True + "eventClassKey": dict( + header="Event Class Key", filter="textfield", sortable=True ), - - 'facility' : dict( - header='Syslog Facility', - sortable=False, - filter=False), - - 'priority' : dict( - header='Syslog Priority', - sortable=False, - filter=False), - - 'eventClassMapping': dict( - header='Event Class Mapping', + "eventGroup": dict( + header="Event Group", filter="textfield", sortable=True + ), + "eventKey": dict(header="Event Key", filter="textfield", sortable=True), + "agent": dict(header="Agent", filter="textfield", sortable=True), + "monitor": dict(header="Collector", filter="textfield", sortable=True), + "ownerid": dict(header="Owner", filter="textfield", sortable=True), + "facility": dict(header="Syslog Facility", sortable=False, filter=False), + "priority": dict(header="Syslog Priority", sortable=False, filter=False), + "eventClassMapping": dict( + header="Event Class Mapping", sortable=False, filter=False, - renderer='Zenoss.render.LinkFromGridGuidGroup'), - - 'clearid': dict( - header='Cleared by Event ID', - filter=False), - - 'ntevid': dict( - header='NT Event Code', - sortable=False, - filter=False), - - 'ipAddress' : dict( - header='IP Address', - sortable=True, - filter='textfield'), - - 'message' : dict( - header='Message', - renderer='Zenoss.render.eventSummaryRow', + renderer="Zenoss.render.LinkFromGridGuidGroup", + ), + "clearid": dict(header="Cleared by Event ID", filter=False), + "ntevid": dict(header="NT Event Code", sortable=False, filter=False), + "ipAddress": dict(header="IP Address", sortable=True, 
filter="textfield"), + "message": dict( + header="Message", + renderer="Zenoss.render.eventSummaryRow", sortable=False, - filter='textfield'), - - 'Location' : dict( - header='Location', + filter="textfield", + ), + "Location": dict( + header="Location", sortable=True, - filter='textfield', - renderer='Zenoss.render.LinkFromGridUidGroup' - ), - - 'DeviceGroups': dict( - header='Groups', + filter="textfield", + renderer="Zenoss.render.LinkFromGridUidGroup", + ), + "DeviceGroups": dict( + header="Groups", sortable=True, - filter='textfield', - renderer='Zenoss.render.LinkFromGridUidGroup' - ), - - 'Systems' : dict( - header='Systems', + filter="textfield", + renderer="Zenoss.render.LinkFromGridUidGroup", + ), + "Systems": dict( + header="Systems", sortable=True, - filter='textfield', - renderer='Zenoss.render.LinkFromGridUidGroup'), - - 'DeviceClass': dict( - header='Device Class', + filter="textfield", + renderer="Zenoss.render.LinkFromGridUidGroup", + ), + "DeviceClass": dict( + header="Device Class", sortable=True, - filter='textfield', - renderer='Zenoss.render.LinkFromGridUidGroup', - ), + filter="textfield", + renderer="Zenoss.render.LinkFromGridUidGroup", + ), } -COLUMN_CONFIG['eventStateText'] = dict( - dataIndex='eventState', - header='Status Text', +COLUMN_CONFIG["eventStateText"] = dict( + dataIndex="eventState", + header="Status Text", width=100, - filter=COLUMN_CONFIG['eventState']['filter'], - renderer='Zenoss.util.render_status_text', + filter=COLUMN_CONFIG["eventState"]["filter"], + renderer="Zenoss.util.render_status_text", ) ARCHIVE_COLUMN_CONFIG = copy.deepcopy(COLUMN_CONFIG) -ARCHIVE_COLUMN_CONFIG['eventState']['filter'] = { - 'xtype':'multiselectmenu', - 'text':'...', - 'cls': 'x-btn x-btn-default-toolbar-small', - 'source':[{ - 'value':STATUS_CLOSED, - 'name':'Closed', - }, - { - 'value':STATUS_CLEARED, - 'name':'Cleared', - }, - { - 'value':STATUS_AGED, - 'name':'Aged', - }] +ARCHIVE_COLUMN_CONFIG["eventState"]["filter"] = { + "xtype": "multiselectmenu", + "text": "...", + "cls": "x-btn x-btn-default-toolbar-small", + "source": [ + { + "value": STATUS_CLOSED, + "name": "Closed", + }, + { + "value": STATUS_CLEARED, + "name": "Cleared", + }, + { + "value": STATUS_AGED, + "name": "Aged", + }, + ], } -ARCHIVE_COLUMN_CONFIG['severity']['filter'] = { - 'xtype':'multiselectmenu', - 'text':'...', - 'cls': 'x-btn x-btn-default-toolbar-small', - 'source': [{ - 'value': SEVERITY_CRITICAL, - 'name': 'Critical' - },{ - 'value': SEVERITY_ERROR, - 'name': 'Error' - },{ - 'value': SEVERITY_WARNING, - 'name': 'Warning' - },{ - 'value': SEVERITY_INFO, - 'name':'Info' - },{ - 'value': SEVERITY_DEBUG, - 'name':'Debug', - 'checked':False - },{ - 'value': SEVERITY_CLEAR, - 'name':'Clear', - }] +ARCHIVE_COLUMN_CONFIG["severity"]["filter"] = { + "xtype": "multiselectmenu", + "text": "...", + "cls": "x-btn x-btn-default-toolbar-small", + "source": [ + {"value": SEVERITY_CRITICAL, "name": "Critical"}, + {"value": SEVERITY_ERROR, "name": "Error"}, + {"value": SEVERITY_WARNING, "name": "Warning"}, + {"value": SEVERITY_INFO, "name": "Info"}, + {"value": SEVERITY_DEBUG, "name": "Debug", "checked": False}, + { + "value": SEVERITY_CLEAR, + "name": "Clear", + }, + ], } DEFAULT_COLUMNS = [ - 'eventState', - 'severity', - 'device', - 'component', - 'eventClass', - 'summary', - 'firstTime', - 'lastTime', - 'count', + "eventState", + "severity", + "device", + "component", + "eventClass", + "summary", + "firstTime", + "lastTime", + "count", ] DEFAULT_COLUMN_ORDER = [ - 'evid', - 'dedupid', - - 
'eventState', - 'eventStateText', - 'severity', - 'device', - 'component', - 'eventClass', - 'summary', - 'firstTime', - 'lastTime', - 'count', - - 'prodState', - 'DevicePriority', - 'stateChange', - 'eventClassKey', - 'eventGroup', - 'eventKey', - 'agent', - 'monitor', - 'ownerid', - 'facility', - 'priority', - 'eventClassMapping', - 'clearid', - 'ntevid', - 'ipAddress', - 'message', - 'Location', - 'DeviceGroups', - 'Systems', - 'DeviceClass' + "evid", + "dedupid", + "eventState", + "eventStateText", + "severity", + "device", + "component", + "eventClass", + "summary", + "firstTime", + "lastTime", + "count", + "prodState", + "DevicePriority", + "stateChange", + "eventClassKey", + "eventGroup", + "eventKey", + "agent", + "monitor", + "ownerid", + "facility", + "priority", + "eventClassMapping", + "clearid", + "ntevid", + "ipAddress", + "message", + "Location", + "DeviceGroups", + "Systems", + "DeviceClass", ] diff --git a/Products/ZenUI3/browser/eventconsole/configure.zcml b/Products/ZenUI3/browser/eventconsole/configure.zcml index 9dae600b2b..368d42d5ac 100644 --- a/Products/ZenUI3/browser/eventconsole/configure.zcml +++ b/Products/ZenUI3/browser/eventconsole/configure.zcml @@ -64,7 +64,7 @@ name="viewHistoryDetail" for="Products.ZenModel.EventView.IEventView" template="historydetail.pt" - permission="zenoss.View" + permission="zenoss.Common" /> \n' - '\n' - '\n' + "\n" + "\n" ) evutil = IEventManagerProxy(self) @@ -176,25 +205,29 @@ def xml(self, response, archive, options, **params): ) % (escape(zem.absolute_url_path())) for fields, evt in self._query(archive, **params): - firstTime = self._timeformat(evt['firstTime'], options) + firstTime = self._timeformat(evt["firstTime"], options) + response.write( + "\n" % quoteattr(firstTime) + ) response.write( - '\n' % quoteattr(firstTime)) - response.write(( - "\t\n" - "\t\t%s\n" - "\t\t%s\n" - "\t\t%s\n" - "\t\n" - ) % ( - escape(str(evt.get('DeviceClass', ''))), - escape(str(evt.get('device', ''))), - escape(str(evt.get('ipAddress', ''))) - )) + ( + "\t\n" + "\t\t%s\n" + "\t\t%s\n" + "\t\t%s\n" + "\t\n" + ) + % ( + escape(str(evt.get("DeviceClass", ""))), + escape(str(evt.get("device", ""))), + escape(str(evt.get("ipAddress", ""))), + ) + ) response.write(reporterComponent) - for tag in ('dedupid', 'summary', 'message'): + for tag in ("dedupid", "summary", "message"): response.write( - '\t<{tag}>{val}\n'.format( - tag=tag, val=escape(str(evt.pop(tag, ''))) + "\t<{tag}>{val}\n".format( + tag=tag, val=escape(str(evt.pop(tag, ""))) ) ) @@ -203,19 +236,26 @@ def xml(self, response, archive, options, **params): evt.update(details) del evt[DETAILS_KEY] - exportVisible = params.get('exportVisible', True) - evtItems = {k: v for k, v in evt.iteritems() - if k in fields and exportVisible} if exportVisible else evt + exportVisible = params.get("exportVisible", True) + evtItems = ( + { + k: v + for k, v in evt.iteritems() + if k in fields and exportVisible + } + if exportVisible + else evt + ) for key, value in evtItems.iteritems(): if value is not None: if key in ("lastTime", "firstTime", "stateChange"): value = self._timeformat(value, options) - key = str(key).replace('.', '_') + key = str(key).replace(".", "_") response.write( - '\t<%s>%s\n' % (key, escape(str(value)), key) + "\t<%s>%s\n" % (key, escape(str(value)), key) ) - response.write('\n') + response.write("\n") response.write("\n") diff --git a/Products/ZenUI3/browser/eventconsole/grid.py b/Products/ZenUI3/browser/eventconsole/grid.py index c0ee55ce45..e811e9e02c 100644 --- 
a/Products/ZenUI3/browser/eventconsole/grid.py +++ b/Products/ZenUI3/browser/eventconsole/grid.py @@ -7,32 +7,37 @@ # ############################################################################## +import logging import zope.i18n -from zope.i18nmessageid import MessageFactory -_ = MessageFactory('zenoss') from Products.Five.browser import BrowserView from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile +from zope.i18nmessageid import MessageFactory -from Products.Zuul import getFacade - -from Products.ZenUtils.jsonutils import JavaScript, javascript +from Products.ZenUI3.browser.eventconsole.columns import ( + ARCHIVE_COLUMN_CONFIG, + COLUMN_CONFIG, + DEFAULT_COLUMN_ORDER, + DEFAULT_COLUMNS, +) from Products.ZenUI3.utils.javascript import JavaScriptSnippet -from Products.ZenUI3.browser.eventconsole.columns import COLUMN_CONFIG, ARCHIVE_COLUMN_CONFIG, DEFAULT_COLUMN_ORDER, DEFAULT_COLUMNS +from Products.ZenUtils.jsonutils import JavaScript, javascript +from Products.Zuul import getFacade from zenoss.protocols.protobufs.zep_pb2 import EventDetailItem from zenoss.protocols.services.zep import ZepConnectionError -import logging -log = logging.getLogger('zep.grid') +_ = MessageFactory("zenoss") + +log = logging.getLogger("zep.grid") + class EventConsoleView(BrowserView): - __call__ = ViewPageTemplateFile('view-events.pt') + __call__ = ViewPageTemplateFile("view-events.pt") class HistoryConsoleView(BrowserView): - __call__ = ViewPageTemplateFile('view-history-events.pt') - + __call__ = ViewPageTemplateFile("view-history-events.pt") def _find_column_fields(): @@ -43,10 +48,10 @@ def _find_column_fields(): TODO: We need to map these details to the old property names. """ try: - details = getFacade('zep').getUnmappedDetails() + details = getFacade("zep").getUnmappedDetails() for item in details: - if item['key'] not in DEFAULT_COLUMN_ORDER: - DEFAULT_COLUMN_ORDER.append(item['key']) + if item["key"] not in DEFAULT_COLUMN_ORDER: + DEFAULT_COLUMN_ORDER.append(item["key"]) except ZepConnectionError as e: log.error(e.message) @@ -66,7 +71,7 @@ def _find_column_definitions(archive=False): columns = ARCHIVE_COLUMN_CONFIG try: - details = getFacade('zep').getUnmappedDetails() + details = getFacade("zep").getUnmappedDetails() except ZepConnectionError as e: log.error(e.message) return columns @@ -74,20 +79,20 @@ def _find_column_definitions(archive=False): for item in details: # add or update anything that already exists in our column definition # with the result from ZEP. This will override known columns and create - # new column definitions for new custom fields. The id for these columns - # is implied from the key used to store in columns. + # new column definitions for new custom fields. The id for these + # columns is implied from the key used to store in columns. 
detailConfig = { - 'header': item['name'], - 'filter': { 'xtype': 'textfield' }, - 'sortable': True, + "header": item["name"], + "filter": {"xtype": "textfield"}, + "sortable": True, } - if item['type'] in (EventDetailItem.INTEGER, EventDetailItem.LONG): - detailConfig['filter']['vtype'] = 'numrange' - elif item['type'] in (EventDetailItem.DOUBLE, EventDetailItem.FLOAT): - detailConfig['filter']['vtype'] = 'floatrange' + if item["type"] in (EventDetailItem.INTEGER, EventDetailItem.LONG): + detailConfig["filter"]["vtype"] = "numrange" + elif item["type"] in (EventDetailItem.DOUBLE, EventDetailItem.FLOAT): + detailConfig["filter"]["vtype"] = "floatrange" - columns[item['key']] = detailConfig + columns[item["key"]] = detailConfig return columns @@ -98,17 +103,16 @@ def reader_config(archive=False): readerFields = [] fields = _find_column_fields() for field in fields: - # If the column definition also has a property for defining the field on - # the reader, use that. If not, we have to just use the defaults. - if 'field_definition' in columns[field]: - col = JavaScript(columns[field]['field_definition']) + # If the column definition also has a property for defining the field + # on the reader, use that. If not, we have to just use the defaults. + if "field_definition" in columns[field]: + col = JavaScript(columns[field]["field_definition"]) else: col = dict(name=field) readerFields.append(javascript(col)) return readerFields - def column_config(request=None, archive=False): columns = _find_column_definitions(archive) @@ -117,19 +121,19 @@ def column_config(request=None, archive=False): for field in fields: col = columns[field].copy() if request: - msg = _(col['header']) - col['header'] = zope.i18n.translate(msg, context=request) - col['id'] = field.replace('.', '_') - if 'dataIndex' not in col: - col['dataIndex'] = field - col['filterKey'] = field - if isinstance(col['filter'], basestring): - col['filter'] = {'xtype':col['filter']} - col['sortable'] = col.get('sortable', False) - col['hidden'] = col.get('hidden', field not in DEFAULT_COLUMNS) - - if 'renderer' in col: - col['renderer'] = JavaScript(col['renderer']) + msg = _(col["header"]) + col["header"] = zope.i18n.translate(msg, context=request) + col["id"] = field.replace(".", "_") + if "dataIndex" not in col: + col["dataIndex"] = field + col["filterKey"] = field + if isinstance(col["filter"], basestring): + col["filter"] = {"xtype": col["filter"]} + col["sortable"] = col.get("sortable", False) + col["hidden"] = col.get("hidden", field not in DEFAULT_COLUMNS) + + if "renderer" in col: + col["renderer"] = JavaScript(col["renderer"]) column_definitions.append(javascript(col)) return column_definitions @@ -138,18 +142,23 @@ def column_config(request=None, archive=False): class EventClasses(JavaScriptSnippet): def snippet(self): paths = self.context.dmd.Events.getOrganizerNames() - return """ + return ( + """ Ext.onReady(function(){ Zenoss.env.EVENT_CLASSES = %s; }) - """ % paths + """ + % paths + ) class GridColumnDefinitions(JavaScriptSnippet): - def snippet(self): - last_path_item = self.request['PATH_INFO'].split('/')[-1] - archive = last_path_item.lower().find('history') != -1 or last_path_item.lower().find('archive') != -1 + last_path_item = self.request["PATH_INFO"].split("/")[-1] + archive = ( + last_path_item.lower().find("history") != -1 + or last_path_item.lower().find("archive") != -1 + ) result = ["Ext.onReady(function(){"] @@ -157,34 +166,42 @@ def snippet(self): reader_fields = reader_config(archive=archive) - 
result.append('Zenoss.env.COLUMN_DEFINITIONS=[') - result.append(',\n'.join(defs)) - result.append('];') + result.append("Zenoss.env.COLUMN_DEFINITIONS=[") + result.append(",\n".join(defs)) + result.append("];") - result.append('Zenoss.env.READER_DEFINITIONS=[') - result.append(',\n'.join(reader_fields)) - result.append('];') + result.append("Zenoss.env.READER_DEFINITIONS=[") + result.append(",\n".join(reader_fields)) + result.append("];") - result.append('Zenoss.env.ZP_DETAILS=[') + result.append("Zenoss.env.ZP_DETAILS=[") try: - zepdetails = getFacade('zep').getUnmappedDetails() + zepdetails = getFacade("zep").getUnmappedDetails() zpdetails = [] for detail in zepdetails: - if detail['type'] in (EventDetailItem.STRING, EventDetailItem.IP_ADDRESS, EventDetailItem.PATH): - rulecmp = 'Zenoss.form.rule.STRINGCOMPARISONS' + if detail["type"] in ( + EventDetailItem.STRING, + EventDetailItem.IP_ADDRESS, + EventDetailItem.PATH, + ): + rulecmp = "Zenoss.form.rule.STRINGCOMPARISONS" else: - rulecmp = 'Zenoss.form.rule.NUMBERCOMPARISONS' - zpdetails.append("{{ text: _t('{name}'), value: '{key}', comparisons: {cmp} }}".format(name=detail['name'], key=detail['key'], cmp=rulecmp)) - result.append(',\n'.join(zpdetails)) + rulecmp = "Zenoss.form.rule.NUMBERCOMPARISONS" + zpdetails.append( + "{{ text: _t('{name}'), value: '{key}', " + "comparisons: {cmp} }}".format( + name=detail["name"], key=detail["key"], cmp=rulecmp + ) + ) + result.append(",\n".join(zpdetails)) except ZepConnectionError as e: log.error(e.message) - result.append('];') - + result.append("];") result.append("Zenoss.env.EVENT_AUTO_EXPAND_COLUMN='summary';") - - result.append(""" + result.append( + """ Ext.define('Zenoss.events.Model', { extend: 'Ext.data.Model', @@ -192,7 +209,7 @@ def snippet(self): fields: Zenoss.env.READER_DEFINITIONS }); -""") - result.append('});') - return '\n'.join(result) - +""" + ) + result.append("});") + return "\n".join(result) diff --git a/Products/ZenUI3/browser/eventconsole/interfaces.py b/Products/ZenUI3/browser/eventconsole/interfaces.py index 86b1f6e242..668d922856 100644 --- a/Products/ZenUI3/browser/eventconsole/interfaces.py +++ b/Products/ZenUI3/browser/eventconsole/interfaces.py @@ -1,27 +1,30 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - import zope.interface + class IEventManagerProxy(zope.interface.Interface): """ Holds several methods useful for interacting with a Zenoss event manager. """ + def _is_history(): """ Should we be dealing with a history manager? """ + def _evmgr(): """ Get an event manager """ + def _extract_data_from_zevent(): """ Turn an event into a dictionary containing necessary fields. diff --git a/Products/ZenUI3/browser/graphs.py b/Products/ZenUI3/browser/graphs.py index 96183750cf..d1bb4f89a2 100644 --- a/Products/ZenUI3/browser/graphs.py +++ b/Products/ZenUI3/browser/graphs.py @@ -6,37 +6,45 @@ # License.zenoss under the directory where your Zenoss product is installed. 
# ############################################################################## + import json -from collections import OrderedDict import logging -from Products.ZenUtils.Time import LocalDateTimeFromMilli + +from collections import OrderedDict + from Products.Five.browser import BrowserView -log = logging.getLogger('zen.graphexport') + +from Products.ZenUtils.Time import LocalDateTimeFromMilli + +log = logging.getLogger("zen.graphexport") class ExportGraph(BrowserView): """ This view takes the chart data and sends it back to the client as a CSV. """ + def __call__(self, *args, **kwargs): """ Takes the posted "plots" element and exports a CSV """ - title = self.request.form.get('title', 'graph_export') - start = self.request.form.get('start', "Unknown") - end = self.request.form.get('end', "Unknown") - uid = self.request.form.get('uid', None) - plots = self.request.form.get('plots') - units = 'Units: {0}'.format(self.request.form.get('units')) + title = self.request.form.get("title", "graph_export") + start = self.request.form.get("start", "Unknown") + end = self.request.form.get("end", "Unknown") + uid = self.request.form.get("uid", None) + plots = self.request.form.get("plots") + units = "Units: {0}".format(self.request.form.get("units")) # come up with the title if uid: obj = self.context.unrestrictedTraverse(uid) - exportTitle = '{title}_{name}_from_{start}_to_{end}'.format(title=title.replace(' ', '_'), - start=start, - end=end, - name=obj.titleOrId()) + exportTitle = "{title}_{name}_from_{start}_to_{end}".format( + title=title.replace(" ", "_"), + start=start, + end=end, + name=obj.titleOrId(), + ) else: - exportTitle = title.replace(' ', '_') + exportTitle = title.replace(" ", "_") if not plots: self.request.response.write("Unable to load chart data.") @@ -45,32 +53,48 @@ def __call__(self, *args, **kwargs): plots = json.loads(plots) except Exception as e: log.exception(e) - self.request.response.write("POST data contains invalid json %s" % plots) + self.request.response.write( + "POST data contains invalid json %s" % plots + ) self.request.response.setHeader( - 'Content-Type', 'application/vnd.ms-excel') + "Content-Type", "application/vnd.ms-excel" + ) self.request.response.setHeader( - 'Content-Disposition', 'attachment; filename=%s.csv' % exportTitle) + "Content-Disposition", "attachment; filename=%s.csv" % exportTitle + ) # write the device information # construct the labels, Time will always be first - labels = ['Time'] + [p['key'] for p in plots] + [units] + labels = ["Time"] + [p["key"] for p in plots] + [units] # timestamps is a dictionary of values indexed by the time. This is to - # make sure we have a row for every unique timestamp in our csv, even if - # it is not present for all metrics + # make sure we have a row for every unique timestamp in our csv, even + # if # it is not present for all metrics timestamps = {} for p in plots: - for value in p['values']: - # x is always the timestamp and y is always the value at that time - time = value['x'] + for value in p["values"]: + # x is always the timestamp and y is always the + # value at that time. 
+ time = value["x"] if not timestamps.get(time): timestamps[time] = dict() - timestamps[time][p['key']] = value['y'] - ordered_timestamps = OrderedDict(sorted(timestamps.items(), reverse=True)) + timestamps[time][p["key"]] = value["y"] + ordered_timestamps = OrderedDict( + sorted(timestamps.items(), reverse=True) + ) # writeExportRows works best with a dictionary of - # data will looks something like this [{u'15 Minute': 0.72, u'5 Minute': 0.8, u'1 Minute': 0.88, 'Time': '2013/10/04 13:43:20.000'}, ...] + # data will looks something like this + # [ + # { + # u'15 Minute': 0.72, + # u'5 Minute': 0.8, + # u'1 Minute': 0.88, + # 'Time': '2013/10/04 13:43:20.000' + # }, + # ... + # ] data = [] for time, values in ordered_timestamps.iteritems(): datum = dict(Time=LocalDateTimeFromMilli(time)) diff --git a/Products/ZenUI3/browser/interfaces.py b/Products/ZenUI3/browser/interfaces.py index feacb87d67..ec7f0253b0 100644 --- a/Products/ZenUI3/browser/interfaces.py +++ b/Products/ZenUI3/browser/interfaces.py @@ -1,71 +1,79 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - from zope.interface import Interface -from Products.ZenUI3.utils.interfaces import IJavaScriptSnippetManager from zope.viewlet.interfaces import IViewlet, IViewletManager +from Products.ZenUI3.utils.interfaces import IJavaScriptSnippetManager + + class IMainSnippetManager(IJavaScriptSnippetManager): """ A viewlet manager to handle general javascript snippets. """ + class IJavaScriptSrcManager(IViewletManager): """ a viewlet manager to handle java script src viewlets """ - + + class ICSSSrcManager(IViewletManager): """ a viewlet manager to handle java script src viewlets """ - + class IXTraceSrcManager(IViewletManager): """ a viewlet manager to handle java script src viewlets that are needed for graph visualizations """ + class IJavaScriptSrcViewlet(IViewlet): """ A viewlet that will generate java a script src file includes """ - + + class ICSSSrcViewlet(IViewlet): """ A viewlet that will generate java a script src file includes """ - + class IJavaScriptBundleViewlet(IViewlet): """ A viewlet that will generate a list of CSS link file includes """ + class ICSSBundleViewlet(IViewlet): """ A viewlet that will generate a list of CSS link file includes """ - + class IHeadExtraManager(IViewletManager): """ A viewlet manager to allow ZenPacks, etc. to plug in extra stuff. """ + class INewPath(Interface): """ Translates old paths into new ones. """ + class IErrorMessage(Interface): """ A 404 or 500 page. 
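The interfaces above are resolved at render time by multi-adapting (context, request, view) to a named IViewletManager, which is the same lookup the ErrorMessage._get_viewlets helper earlier in this patch performs for "head-extra", "primarynav" and "secondarynav". A minimal sketch of that pattern, not part of the patch itself, with a hypothetical helper name and assuming a manager is already registered under "head-extra":

    # Illustrative sketch only; render_head_extra is a hypothetical helper,
    # not part of this patch. It assumes a viewlet manager registered under
    # the name "head-extra" for (context, request, view).
    from zope.component import queryMultiAdapter
    from zope.viewlet.interfaces import IViewletManager

    def render_head_extra(context, request, view):
        # Multi-adapt (context, request, view) to the named manager.
        mgr = queryMultiAdapter(
            (context, request, view), IViewletManager, "head-extra"
        )
        if mgr is None:
            return u""
        mgr.update()         # let each registered viewlet prepare its data
        return mgr.render()  # concatenate the viewlets' rendered output
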
diff --git a/Products/ZenUI3/browser/javascript.py b/Products/ZenUI3/browser/javascript.py index f0f523755f..f733417eea 100644 --- a/Products/ZenUI3/browser/javascript.py +++ b/Products/ZenUI3/browser/javascript.py @@ -7,26 +7,37 @@ # ############################################################################## - +import md5 import os + +from urlparse import urljoin + import Globals import zope.interface -import md5 -from urlparse import urljoin -from interfaces import IMainSnippetManager -from Products.ZenUI3.utils.javascript import JavaScriptSnippetManager,\ - JavaScriptSnippet, SCRIPT_TAG_TEMPLATE -from Products.ZenUI3.browser.interfaces import IJavaScriptSrcViewlet,\ - IJavaScriptBundleViewlet, IJavaScriptSrcManager, IXTraceSrcManager, ICSSBundleViewlet, ICSSSrcManager + from Products.Five.viewlet.viewlet import ViewletBase -from Products.ZenUI3.navigation.manager import WeightOrderedViewletManager -from Products.ZenUtils.extdirect.zope.metaconfigure import allDirectRouters from zope.publisher.browser import TestRequest from zope.component import getAdapter + +from Products.ZenUI3.browser.interfaces import ( + ICSSBundleViewlet, + ICSSSrcManager, + IJavaScriptBundleViewlet, + IJavaScriptSrcManager, + IJavaScriptSrcViewlet, + IXTraceSrcManager, +) +from Products.ZenUI3.navigation.manager import WeightOrderedViewletManager +from Products.ZenUI3.utils.javascript import ( + JavaScriptSnippet, + JavaScriptSnippetManager, + SCRIPT_TAG_TEMPLATE, +) +from Products.ZenUtils.extdirect.zope.metaconfigure import allDirectRouters from Products.ZenUtils.Utils import monkeypatch -from Products.ZenModel.ZVersion import VERSION from Products.Zuul.decorators import memoize +from .interfaces import IMainSnippetManager from .resources import COMPILED_JS_EXISTS @@ -36,7 +47,7 @@ _registered_resources = [] -@monkeypatch('Products.Five.browser.metaconfigure') +@monkeypatch("Products.Five.browser.metaconfigure") def resourceDirectory(*args, **kwargs): """ There isn't a way to ask the site manager for a list of registered @@ -48,33 +59,42 @@ def resourceDirectory(*args, **kwargs): global _registered_resources # will be name and directory _registered_resources.append(kwargs) - return original(*args, **kwargs) + return original(*args, **kwargs) # noqa F821 def getAllZenPackResources(): # make a copy so the original isn't mutated - return [x for x in _registered_resources if "zenpack" in x['directory'].lower()] + return [ + x for x in _registered_resources if "zenpack" in x["directory"].lower() + ] + @memoize def getPathModifiedTime(path): """ - This method takes a js request path such as /++resources++zenui/zenoss/file.js and + This method takes a js request path such as + /++resources++zenui/zenoss/file.js and returns the last time the file was modified. 
""" if "++resource++" in path: - identifier = path.split('/')[1].replace("++resource++", "") - filePath = path.replace("/++resource++" + identifier , "") + identifier = path.split("/")[1].replace("++resource++", "") + filePath = path.replace("/++resource++" + identifier, "") resource = getAdapter(dummyRequest, name=identifier) fullPath = resource.context.path + filePath if os.path.exists(fullPath): return os.path.getmtime(fullPath) + SCRIPT_TAG_SRC_TEMPLATE = '\n' -LINK_TAG_SRC_TEMPLATE = '\n' +LINK_TAG_SRC_TEMPLATE = ( + '\n' +) def absolutifyPath(path): - return urljoin('/zport/dmd', path) + return urljoin("/zport/dmd", path) + + getVersionedPath = absolutifyPath @@ -82,29 +102,33 @@ class MainSnippetManager(JavaScriptSnippetManager): """ A viewlet manager to handle Ext.Direct API definitions. """ + zope.interface.implements(IMainSnippetManager) + class CSSSrcManager(WeightOrderedViewletManager): zope.interface.implements(ICSSSrcManager) + class JavaScriptSrcManager(WeightOrderedViewletManager): zope.interface.implements(IJavaScriptSrcManager) + class XTraceSrcManager(WeightOrderedViewletManager): zope.interface.implements(IXTraceSrcManager) class CSSSrcBundleViewlet(ViewletBase): zope.interface.implements(ICSSBundleViewlet) - #space delimited string of src paths - paths = '' + # space delimited string of src paths + paths = "" def render(self): vals = [] if self.paths: for path in self.paths.split(): vals.append(LINK_TAG_SRC_TEMPLATE % absolutifyPath(path)) - js = '' + js = "" if vals: js = "".join(vals) return js @@ -122,43 +146,48 @@ def render(self): class JavaScriptSrcBundleViewlet(ViewletBase): zope.interface.implements(IJavaScriptBundleViewlet) - #space delimited string of src paths - paths = '' + # space delimited string of src paths + paths = "" def render(self): vals = [] if self.paths: for path in self.paths.split(): vals.append(SCRIPT_TAG_SRC_TEMPLATE % absolutifyPath(path)) - js = '' + js = "" if vals: js = "".join(vals) return js + class ExtDirectViewlet(JavaScriptSrcViewlet): """ A specialized renderer for ExtDirect. We can not cache-bust this file by the modified time so we use a hash of the defined routers """ + directHash = None def render(self): if self.directHash is None: # append the extdirect request with a hash or all routers # so that it is updated when a new zenpack is installed - routernames = sorted([r['name'] for r in allDirectRouters.values()]) + routernames = sorted( + [r["name"] for r in allDirectRouters.values()] + ) self.directHash = md5.new(" ".join(routernames)).hexdigest() - path = self.path + "?v=" + self.directHash + path = self.path + "?v=" + self.directHash return SCRIPT_TAG_SRC_TEMPLATE % path class ZenossAllJs(JavaScriptSrcViewlet): """ - When Zope is in debug mode, we want to use the development JavaScript source - files, so we don't have to make changes to a single huge file. If Zope is in - production mode and the compressed file is not available, we will use the - source files instead of just giving up. + When Zope is in debug mode, we want to use the development JavaScript + source files, so we don't have to make changes to a single huge file. + If Zope is in production mode and the compressed file is not available, + we will use the source files instead of just giving up. 
""" + zope.interface.implements(IJavaScriptSrcViewlet) def update(self): @@ -182,7 +211,6 @@ def update(self): class FireFoxExtCompat(JavaScriptSnippet): - def snippet(self): js = """ (function() { @@ -196,28 +224,29 @@ def snippet(self): } })(); """ - return SCRIPT_TAG_TEMPLATE % js - + return SCRIPT_TAG_TEMPLATE % js class VisualizationInit(JavaScriptSnippet): """ Performs necessary initialization for the visualization library """ + def snippet(self): js = """ if (window.zenoss !== undefined) { zenoss.visualization.url = window.location.protocol + "//" + window.location.host; zenoss.visualization.debug = false; } - """ - return SCRIPT_TAG_TEMPLATE % js + """ # noqa E501 + return SCRIPT_TAG_TEMPLATE % js class ZenossSettings(JavaScriptSnippet): """ Renders client side settings. """ + def snippet(self): settings = self.context.dmd.UserInterfaceSettings js = ["Ext.namespace('Zenoss.settings');"] @@ -225,24 +254,30 @@ def snippet(self): js.append("Zenoss.settings.%s = %s;" % (name, str(value).lower())) return "\n".join(js) + class ZenossData(JavaScriptSnippet): """ This preloads some data for the UI so that every page doesn't have to send a separate router request to fetch it. """ + def snippet(self): # collectors - collectors = [[s] for s in self.context.dmd.Monitors.getPerformanceMonitorNames()] + collectors = [ + [s] for s in self.context.dmd.Monitors.getPerformanceMonitorNames() + ] # priorities - priorities = [dict(name=s[0], - value=int(s[1])) for s in - self.context.dmd.getPriorityConversions()] + priorities = [ + dict(name=s[0], value=int(s[1])) + for s in self.context.dmd.getPriorityConversions() + ] # production states - productionStates = [dict(name=s[0], - value=int(s[1])) for s in - self.context.dmd.getProdStateConversions()] + productionStates = [ + dict(name=s[0], value=int(s[1])) + for s in self.context.dmd.getProdStateConversions() + ] # timezone # to determine the timezone we look in the following order @@ -264,22 +299,31 @@ def snippet(self): Zenoss.USER_DATE_FORMAT = "%s" || "YYYY/MM/DD"; Zenoss.USER_TIME_FORMAT = "%s" || "HH:mm:ss"; })(); - """ % ( collectors, priorities, productionStates, timezone, date_fmt, time_fmt ) + """ % ( + collectors, + priorities, + productionStates, + timezone, + date_fmt, + time_fmt, + ) return SCRIPT_TAG_TEMPLATE % snippet + class BrowserState(JavaScriptSnippet): """ Restores the browser state. """ + def snippet(self): try: userSettings = self.context.ZenUsers.getUserSettings() except AttributeError: # We're on a backcompat page where we don't have browser state # anyway. Move on. 
- return '' - state_container = getattr(userSettings, '_browser_state', {}) + return "" + state_container = getattr(userSettings, "_browser_state", {}) if isinstance(state_container, basestring): state_container = {} - state = state_container.get('state', '{}') - return 'Ext.state.Manager.getProvider().setState(%r);' % state + state = state_container.get("state", "{}") + return "Ext.state.Manager.getProvider().setState(%r);" % state diff --git a/Products/ZenUI3/browser/macros.py b/Products/ZenUI3/browser/macros.py index 55155fb28f..43eb4da0f3 100644 --- a/Products/ZenUI3/browser/macros.py +++ b/Products/ZenUI3/browser/macros.py @@ -7,7 +7,6 @@ # ############################################################################## - from Products.Five.browser import BrowserView from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile @@ -15,32 +14,34 @@ class PageTemplateMacros(BrowserView): template_mappings = { - 'page1': ('../../ZenModel/skins/zenmodel/templates.pt', - 'page1'), - 'page2': ('../../ZenModel/skins/zenmodel/templates.pt', - 'page2'), - 'base': ('templates/base.pt', - 'base'), - 'base-new': ('templates/base-new.pt', - 'base-new'), - 'masterdetail': ('templates/masterdetail.pt', - 'masterdetail'), - 'masterdetail-new': ('templates/masterdetail-new.pt', - 'masterdetail-new'), - 'masterdetailsplit1': ('templates/masterdetailsplit1.pt', - 'masterdetailsplit1'), - 'masterdetailsplit2': ('templates/masterdetailsplit2.pt', - 'masterdetailsplit2'), - 'masterdetailsplit3': ('templates/masterdetailsplit3.pt', - 'masterdetailsplit3'), - 'masterdetailnested': ('templates/masterdetailnested.pt', - 'masterdetailnested'), - 'verticalbrowse': ('templates/verticalbrowse.pt', - 'verticalbrowse'), - 'old-new-no-tabs': ('templates/old-new.pt', - 'old-new-no-tabs'), - 'old-new': ('templates/old-new.pt', - 'old-new') + "page1": ("../../ZenModel/skins/zenmodel/templates.pt", "page1"), + "page2": ("../../ZenModel/skins/zenmodel/templates.pt", "page2"), + "base": ("templates/base.pt", "base"), + "base-new": ("templates/base-new.pt", "base-new"), + "masterdetail": ("templates/masterdetail.pt", "masterdetail"), + "masterdetail-new": ( + "templates/masterdetail-new.pt", + "masterdetail-new", + ), + "masterdetailsplit1": ( + "templates/masterdetailsplit1.pt", + "masterdetailsplit1", + ), + "masterdetailsplit2": ( + "templates/masterdetailsplit2.pt", + "masterdetailsplit2", + ), + "masterdetailsplit3": ( + "templates/masterdetailsplit3.pt", + "masterdetailsplit3", + ), + "masterdetailnested": ( + "templates/masterdetailnested.pt", + "masterdetailnested", + ), + "verticalbrowse": ("templates/verticalbrowse.pt", "verticalbrowse"), + "old-new-no-tabs": ("templates/old-new.pt", "old-new-no-tabs"), + "old-new": ("templates/old-new.pt", "old-new"), } def __getitem__(self, key): @@ -48,11 +49,11 @@ def __getitem__(self, key): return ViewPageTemplateFile(template).macros[macro] - class BBBMacros(BrowserView): def __getitem__(self, key): - if key=='macros': + if key == "macros": return self tpl = ViewPageTemplateFile( - '../../ZenModel/skins/zenmodel/templates.pt') + "../../ZenModel/skins/zenmodel/templates.pt" + ) return tpl.macros[key] diff --git a/Products/ZenUI3/browser/modelapi/configure.zcml b/Products/ZenUI3/browser/modelapi/configure.zcml index c1fca95649..97a18f2345 100644 --- a/Products/ZenUI3/browser/modelapi/configure.zcml +++ b/Products/ZenUI3/browser/modelapi/configure.zcml @@ -90,4 +90,33 @@ permission="zenoss.Common" /> + + + + + + + +
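For context on the macros.py change above: PageTemplateMacros.__getitem__ treats each key as a (template file, macro name) pair and returns the named macro from the loaded template, which is why "old-new" and "old-new-no-tabs" can share templates/old-new.pt. A rough, Zope-free sketch of that dispatch is below; FakeTemplate is a stand-in for ViewPageTemplateFile (not importable outside Zope) and only pretends to expose a macros mapping.

# Rough sketch of the key -> (template, macro) dispatch used above.
class FakeTemplate(object):
    def __init__(self, path):
        self.path = path
        # The real ViewPageTemplateFile parses the .pt file; here we fake it.
        self.macros = {
            name: "<macro %r from %s>" % (name, path)
            for name in ("base", "old-new", "old-new-no-tabs")
        }

template_mappings = {
    "base": ("templates/base.pt", "base"),
    "old-new": ("templates/old-new.pt", "old-new"),
    "old-new-no-tabs": ("templates/old-new.pt", "old-new-no-tabs"),
}

def get_macro(key):
    template, macro = template_mappings[key]
    return FakeTemplate(template).macros[macro]

print(get_macro("old-new-no-tabs"))
# <macro 'old-new-no-tabs' from templates/old-new.pt>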
diff --git a/Products/ZenUI3/browser/modelapi/modelapi.py b/Products/ZenUI3/browser/modelapi/modelapi.py index 9d03ae8641..e5c33393a6 100644 --- a/Products/ZenUI3/browser/modelapi/modelapi.py +++ b/Products/ZenUI3/browser/modelapi/modelapi.py @@ -6,27 +6,40 @@ # License.zenoss under the directory where your Zenoss product is installed. # ############################################################################## + import json import logging import os + import requests + from Products.Five.browser import BrowserView + from Products import Zuul -log = logging.getLogger('zen.RMMonitor.modelapi') + +log = logging.getLogger("zen.RMMonitor.modelapi") class RabbitQueues(BrowserView): """ This view emits the RabbitMQ queues of the Zenoss system """ + def __call__(self): from Products.ZenUtils.GlobalConfig import getGlobalConfiguration + config = getGlobalConfiguration() - user = config.get('amqpuser', 'zenoss') - password = config.get('amqppassword', 'zenoss') - queueData = json.loads(requests.get('http://localhost:15672/api/queues/%2Fzenoss', - auth=requests.auth.HTTPBasicAuth(user, password)).content) - queueMaps = [dict(id=queue['name']) for queue in queueData if queue['durable']] + user = config.get("amqpuser", "zenoss") + password = config.get("amqppassword", "zenoss") + queueData = json.loads( + requests.get( + "http://localhost:15672/api/queues/%2Fzenoss", + auth=requests.auth.HTTPBasicAuth(user, password), + ).content + ) + queueMaps = [ + dict(id=queue["name"]) for queue in queueData if queue["durable"] + ] self.request.response.write(json.dumps(dict(durableQueues=queueMaps))) @@ -34,9 +47,12 @@ class ZenossRMDevice(BrowserView): """ This view emits the device level info for modeling a Zenoss system """ + def __call__(self): modelInfo = {} - modelInfo['controlplaneTenantId'] = os.environ.get('CONTROLPLANE_TENANT_ID', None) + modelInfo["controlplaneTenantId"] = os.environ.get( + "CONTROLPLANE_TENANT_ID", None + ) self.request.response.write(json.dumps(modelInfo)) @@ -44,103 +60,154 @@ class CollectorInfo(BrowserView): """ This view emits the hub component info for modeling a Zenoss system """ + def __call__(self): - appfacade = Zuul.getFacade('applications') + appfacade = Zuul.getFacade("applications") zenHubs = [] collectors = [] collectorDaemons = [] - for hub in self.context.dmd.Monitors.Hub.objectValues(spec='HubConf'): + for hub in self.context.dmd.Monitors.Hub.objectValues(spec="HubConf"): hubService = appfacade.queryHubDaemons(hub.id)[0] - zenHubs.append(dict(id='hub_{}'.format(hub.id), - title=hub.id, - controlplaneServiceId=hubService.id, - lastModeledState=str(hubService.state).lower(), - RAMCommitment=getattr(hubService, 'RAMCommitment', None), - instanceCount=hubService.instances)) + zenHubs.append( + dict( + id="hub_{}".format(hub.id), + title=hub.id, + controlplaneServiceId=hubService.id, + lastModeledState=str(hubService.state).lower(), + RAMCommitment=getattr(hubService, "RAMCommitment", None), + instanceCount=hubService.instances, + ) + ) for collector in hub.collectors(): - collectors.append(dict(id='collector_{}'.format(collector.id), - title=collector.id, - set_hub='hub_{}'.format(hub.id))) - - for collectorDaemon in appfacade.queryMonitorDaemons(collector.id): - if collectorDaemon.name in ('collectorredis', 'MetricShipper', 'zenmodeler', 'zminion'): + collectors.append( + dict( + id="collector_{}".format(collector.id), + title=collector.id, + set_hub="hub_{}".format(hub.id), + ) + ) + + for collectorDaemon in appfacade.queryMonitorDaemons( + collector.id + ): + if 
collectorDaemon.name in ( + "collectorredis", + "MetricShipper", + "zenmodeler", + "zminion", + ): continue - collectorDaemons.append(dict(id='{}_{}'.format(collectorDaemon.name, collector.id), - title='{} - {}'.format(collectorDaemon.name, collector.id), - controlplaneServiceId=collectorDaemon.id, - instanceCount=collectorDaemon.instances, - lastModeledState=str(collectorDaemon.state).lower(), - RAMCommitment=getattr(collectorDaemon, 'RAMCommitment', None), - set_collector='collector_{}'.format(collector.id), - monitor=collectorDaemon.autostart)) - - self.request.response.write(json.dumps(dict(zenHubs=zenHubs, - collectors=collectors, - collectorDaemons=collectorDaemons))) + collectorDaemons.append( + dict( + id="{}_{}".format( + collectorDaemon.name, collector.id + ), + title="{} - {}".format( + collectorDaemon.name, collector.id + ), + controlplaneServiceId=collectorDaemon.id, + instanceCount=collectorDaemon.instances, + lastModeledState=str( + collectorDaemon.state + ).lower(), + RAMCommitment=getattr( + collectorDaemon, "RAMCommitment", None + ), + set_collector="collector_{}".format(collector.id), + monitor=collectorDaemon.autostart, + ) + ) + + self.request.response.write( + json.dumps( + dict( + zenHubs=zenHubs, + collectors=collectors, + collectorDaemons=collectorDaemons, + ) + ) + ) class MetricServices(BrowserView): """ This view emits info for redis, MetricShipper, and MetricConsumer services """ + def __call__(self): - appfacade = Zuul.getFacade('applications') - idFormat = '{}_{}' - titleFormat = '{} - {}' + appfacade = Zuul.getFacade("applications") + idFormat = "{}_{}" + titleFormat = "{} - {}" def getRedises(svcName, metricShipperParent=None): redises = [] for svc in appfacade.query(svcName): parentName = appfacade.get(svc.parentId).name - shipperParentName = metricShipperParent if metricShipperParent else parentName - metricShipper = 'MetricShipper_{}'.format(shipperParentName) - redises.append(dict(id=idFormat.format(svc.name, parentName), - title=titleFormat.format(svc.name, parentName), - controlplaneServiceId=svc.id, - RAMCommitment=getattr(svc, 'RAMCommitment', None), - instanceCount=svc.instances, - lastModeledState=str(svc.state).lower(), - set_metricShipper=metricShipper, - )) + shipperParentName = ( + metricShipperParent if metricShipperParent else parentName + ) + metricShipper = "MetricShipper_{}".format(shipperParentName) + redises.append( + dict( + id=idFormat.format(svc.name, parentName), + title=titleFormat.format(svc.name, parentName), + controlplaneServiceId=svc.id, + RAMCommitment=getattr(svc, "RAMCommitment", None), + instanceCount=svc.instances, + lastModeledState=str(svc.state).lower(), + set_metricShipper=metricShipper, + ) + ) return redises - redises = getRedises('redis', 'Metrics') - redises.extend(getRedises('collectorredis')) + redises = getRedises("redis", "Metrics") + redises.extend(getRedises("collectorredis")) metricShippers = [] - for svc in appfacade.query('MetricShipper'): + for svc in appfacade.query("MetricShipper"): parentName = appfacade.get(svc.parentId).name - if parentName == 'Metrics': - redis = 'redis_Infrastructure' + if parentName == "Metrics": + redis = "redis_Infrastructure" else: - redis = 'collectorredis_{}'.format(parentName) - data = dict(id=idFormat.format(svc.name, appfacade.get(svc.parentId).name), - title=titleFormat.format(svc.name, appfacade.get(svc.parentId).name), - controlplaneServiceId=svc.id, - instanceCount=svc.instances, - RAMCommitment=getattr(svc, 'RAMCommitment', None), - 
lastModeledState=str(svc.state).lower(), - set_redis=redis, - ) + redis = "collectorredis_{}".format(parentName) + data = dict( + id=idFormat.format(svc.name, appfacade.get(svc.parentId).name), + title=titleFormat.format( + svc.name, appfacade.get(svc.parentId).name + ), + controlplaneServiceId=svc.id, + instanceCount=svc.instances, + RAMCommitment=getattr(svc, "RAMCommitment", None), + lastModeledState=str(svc.state).lower(), + set_redis=redis, + ) metricShippers.append(data) - consumerService = appfacade.query('MetricConsumer')[0] - metricConsumers = [dict(id='MetricConsumer', - title='MetricConsumer', - controlplaneServiceId=consumerService.id, - lastModeledState=str(consumerService.state).lower(), - RAMCommitment=getattr(consumerService, 'RAMCommitment', None), - instanceCount=consumerService.instances)] - - queryService = appfacade.query('CentralQuery')[0] - centralQueries = [dict(id='CentralQuery', - title='CentralQuery', - controlplaneServiceId=queryService.id, - lastModeledState=str(queryService.state).lower(), - RAMCommitment=getattr(queryService, 'RAMCommitment', None), - instanceCount=queryService.instances)] + consumerService = appfacade.query("MetricConsumer")[0] + metricConsumers = [ + dict( + id="MetricConsumer", + title="MetricConsumer", + controlplaneServiceId=consumerService.id, + lastModeledState=str(consumerService.state).lower(), + RAMCommitment=getattr(consumerService, "RAMCommitment", None), + instanceCount=consumerService.instances, + ) + ] + + queryService = appfacade.query("CentralQuery")[0] + centralQueries = [ + dict( + id="CentralQuery", + title="CentralQuery", + controlplaneServiceId=queryService.id, + lastModeledState=str(queryService.state).lower(), + RAMCommitment=getattr(queryService, "RAMCommitment", None), + instanceCount=queryService.instances, + ) + ] data = dict( redises=redises, @@ -156,18 +223,22 @@ class BaseApiView(BrowserView): """ Base for several highly similar views """ + def __init__(self, context, request): super(BaseApiView, self).__init__(context, request) - self._appfacade = Zuul.getFacade('applications') + self._appfacade = Zuul.getFacade("applications") def _getServices(self, svcName): - return [dict(id=svc.name, - controlplaneServiceId=svc.id, - instanceCount=svc.instances, - RAMCommitment=getattr(svc, 'RAMCommitment', None), - lastModeledState=str(svc.state).lower(), - ) - for svc in self._appfacade.query(svcName)] + return [ + dict( + id=svc.name, + controlplaneServiceId=svc.id, + instanceCount=svc.instances, + RAMCommitment=getattr(svc, "RAMCommitment", None), + lastModeledState=str(svc.state).lower(), + ) + for svc in self._appfacade.query(svcName) + ] def __call__(self): data = {} @@ -180,12 +251,13 @@ class EventDaemons(BaseApiView): """ This view emits info for zeneventd, zeneventserver, and zenactiond """ + @property def _services(self): return ( - ('zenEventDs', 'zeneventd'), - ('zenEventServers', 'zeneventserver'), - ('zenActionDs', 'zenactiond'), + ("zenEventDs", "zeneventd"), + ("zenEventServers", "zeneventserver"), + ("zenActionDs", "zenactiond"), ) @@ -193,83 +265,91 @@ class Solr(BaseApiView): """ This view emits the Solr stats of the Zenoss system """ + @property def _services(self): - return ( - ('solrs', 'Solr'), - ) + return (("solrs", "Solr"),) class ZenModeler(BaseApiView): """ This view emits info regarding zenmodelers of the Zenoss system """ + @property def _services(self): - return ( - ('zenModelers', 'zenmodeler'), - ) + return (("zenModelers", "zenmodeler"),) def _getServices(self, svcName): - idFormat = 
'{}_{}' - titleFormat = '{} - {}' - return [dict(id=idFormat.format(svc.name, self._appfacade.get(svc.parentId).name), - title=titleFormat.format(svc.name, self._appfacade.get(svc.parentId).name), - controlplaneServiceId=svc.id, - instanceCount=svc.instances, - RAMCommitment=getattr(svc, 'RAMCommitment', None), - lastModeledState=str(svc.state).lower()) - for svc in self._appfacade.query(svcName)] + idFormat = "{}_{}" + titleFormat = "{} - {}" + return [ + dict( + id=idFormat.format( + svc.name, self._appfacade.get(svc.parentId).name + ), + title=titleFormat.format( + svc.name, self._appfacade.get(svc.parentId).name + ), + controlplaneServiceId=svc.id, + instanceCount=svc.instances, + RAMCommitment=getattr(svc, "RAMCommitment", None), + lastModeledState=str(svc.state).lower(), + ) + for svc in self._appfacade.query(svcName) + ] class RegionServer(BaseApiView): """ This view emits info for the HBase regionservers in the Zenoss application. """ + @property def _services(self): - return ( - ('regionServers', 'RegionServer'), - ) + return (("regionServers", "RegionServer"),) def _getServices(self, svcName): - #ZEN-30188 there is bigtable instead of RegionServers svc = next(iter(self._appfacade.query(svcName)), None) - if not svc: return [] + if not svc: + return [] count = svc.instances - titleFormat = '{} - {}' - return [dict(id=str(i), - title=titleFormat.format(svc.name, i)) - for i in range(count)] + titleFormat = "{} - {}" + return [ + dict(id=str(i), title=titleFormat.format(svc.name, i)) + for i in range(count) + ] class Zope(BaseApiView): """ Zope info """ + @property def _services(self): - return ( - ('zopes', 'zopes'), - ) + return (("zopes", "zopes"),) def _getServiceInstances(self, name): - idFormat = '{}_{}' - titleFormat = '{} - {}' + idFormat = "{}_{}" + titleFormat = "{} - {}" services = self._appfacade.query(name) if not services: return [] svc = services[0] - data = [dict(id=idFormat.format(name, i), - title=titleFormat.format(name, i)) - for i in range(svc.instances)] + data = [ + dict( + id=idFormat.format(name, i), title=titleFormat.format(name, i) + ) + for i in range(svc.instances) + ] return data def _getServices(self, svcName): - zopes = self._getServiceInstances('Zope') - zopes += self._getServiceInstances('zenapi') - zopes += self._getServiceInstances('zenreports') - zopes += self._getServiceInstances('Zauth') + zopes = self._getServiceInstances("Zope") + zopes += self._getServiceInstances("zenapi") + zopes += self._getServiceInstances("zenreports") + zopes += self._getServiceInstances("Zauth") return zopes @@ -278,27 +358,23 @@ class ZODB(BaseApiView): """ This view emits the ZODB stats of the Zenoss system """ + @property def _services(self): - return ( - ('zodbs', 'mariadb-model'), - ) + return (("zodbs", "mariadb-model"),) class Reader(BaseApiView): """ This view emits the reader stats of the Zenoss system """ + @property def _services(self): - return ( - ('readers', 'reader'), - ) + return (("readers", "reader"),) def _getServices(self, svcName): - readers = super(Reader, self)._getServices('reader') - #ZEN-30188 handle name change of the otsdb services in GCP - readers += super(Reader, self)._getServices('reader-bigtable') + readers = super(Reader, self)._getServices("reader") return readers @@ -306,14 +382,75 @@ class Writer(BaseApiView): """ This view emits the writer stats of the Zenoss system """ + + @property + def _services(self): + return (("writers", "writer"),) + + def _getServices(self, svcName): + writers = super(Writer, self)._getServices("writer") 
+ return writers + +class ImpactDaemons(BaseApiView): + """ + This view emits the Impact daemon services + """ + @property + def _services(self): + return ( + ('impacts', 'Impact'), + ('zenImpactStates', 'zenimpactstate'), + ) + +class Memcached(BaseApiView): + @property def _services(self): return ( - ('writers', 'writer'), + ('memcacheds', 'memcacheds'), ) + def _getServiceInstances(self, name): + idFormat = '{}' + titleFormat = '{}' + services = self._appfacade.query(name) + if not services: + return [] + svc = services[0] + data = [dict(id=idFormat.format(name), + title=titleFormat.format(name), + controlplaneServiceId=svc.id, + instanceCount=svc.instances, + RAMCommitment=getattr(svc, 'RAMCommitment', None), + lastModeledState=str(svc.state).lower() + ) + for i in range(svc.instances)] + return data + def _getServices(self, svcName): - writers = super(Writer, self)._getServices('writer') - #ZEN-30188 handle name change of the otsdb services in GCP - writers += super(Writer, self)._getServices('writer-bigtable') - return writers + memcacheds = self._getServiceInstances('memcached') + memcacheds += self._getServiceInstances('memcached-session') + return memcacheds + +class ConfigCacheDaemons(BaseApiView): + """ + This view emits info for configcache services: invalidator, builders, and manager + """ + @property + def _services(self): + return ( + ('configCacheInvalidators', 'invalidator'), + ('configCacheBuilders', 'builder'), + ('configCacheManagers', 'manager'), + ) + +class ZenjobsMonitor(BaseApiView): + """ + This view emits info for zenjobs-monitor + """ + @property + def _services(self): + return ( + ('zenjobsMonitors', 'zenjobs-monitor'), + ) + diff --git a/Products/ZenUI3/browser/pages.py b/Products/ZenUI3/browser/pages.py index a410ae7db0..1c15000325 100644 --- a/Products/ZenUI3/browser/pages.py +++ b/Products/ZenUI3/browser/pages.py @@ -6,21 +6,26 @@ # License.zenoss under the directory where your Zenoss product is installed. 
# ############################################################################## -import json +import json import logging -log = logging.getLogger("zen.browser.pages") + +from Products.Five.browser import BrowserView +from Products.Five.browser.pagetemplatefile import ( + ZopeTwoPageTemplateFile, + ViewPageTemplateFile, +) from Products import Zuul +from Products.ZenUtils.controlplane.application import getConnectionSettings +from Products.ZenUtils.controlplane.client import ControlPlaneClient +from Products.ZenUtils.Utils import getPasswordFields, maskSecureProperties from Products.Zuul.interfaces import IInfo -from Products.Five.browser import BrowserView -from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile, ViewPageTemplateFile from Products.Zuul.routers.device import DeviceRouter from Products.Zuul.routers.nav import DetailNavRouter -from Products.ZenUtils.controlplane.client import ControlPlaneClient -from Products.ZenUtils.controlplane.application import getConnectionSettings -from Products.ZenUtils.Utils import getPasswordFields, maskSecureProperties +log = logging.getLogger("zen.browser.pages") + class DaemonsView(BrowserView): @@ -49,84 +54,104 @@ def _doCCLogin(self): cookies = None try: cookies = cpClient.cookies() - except Exception as e: - log.warn('Unable to log into Control Center, log viewing functionality may be impacted') + except Exception: + log.warn( + "Unable to log into Control Center, " + "log viewing functionality may be impacted" + ) return for cookie in cookies: self.request.response.setCookie( - name = cookie['name'], - value = cookie['value'], - quoted = True, - domain = self.request.environ['HTTP_HOST'].split(':')[0], # Don't include the port - path = '/', - secure = cookie['secure'] + name=cookie["name"], + value=cookie["value"], + quoted=True, + domain=self.request.environ["HTTP_HOST"].split(":")[ + 0 + ], # Don't include the port + path="/", + secure=cookie["secure"], ) + class ITInfrastructure(BrowserView): __call__ = ZopeTwoPageTemplateFile("templates/itinfrastructure.pt") def getTrees(self): - router = DeviceRouter(self.context.dmd, {}); + router = DeviceRouter(self.context.dmd, {}) method = router.getTree - settings = self.context.dmd.UserInterfaceSettings.getInterfaceSettings() - if settings['incrementalTreeLoad']: + settings = ( + self.context.dmd.UserInterfaceSettings.getInterfaceSettings() + ) + if settings["incrementalTreeLoad"]: method = router.asyncGetTree - deviceTree = method('/zport/dmd/Devices') + deviceTree = method("/zport/dmd/Devices") # system - systemTree = method('/zport/dmd/Systems') + systemTree = method("/zport/dmd/Systems") # groups - groupTree = method('/zport/dmd/Groups') + groupTree = method("/zport/dmd/Groups") # location - locTree = method('/zport/dmd/Locations') - js = """ + locTree = method("/zport/dmd/Locations") + js = """ Zenoss.env.device_tree_data = %s; Zenoss.env.system_tree_data = %s; Zenoss.env.group_tree_data = %s; Zenoss.env.location_tree_data = %s; - """ % (json.dumps(deviceTree), - json.dumps(systemTree), - json.dumps(groupTree), - json.dumps(locTree)) + """ % ( + json.dumps(deviceTree), + json.dumps(systemTree), + json.dumps(groupTree), + json.dumps(locTree), + ) return js class DeviceDetails(BrowserView): - __call__ = ZopeTwoPageTemplateFile('templates/devdetail.pt') + __call__ = ZopeTwoPageTemplateFile("templates/devdetail.pt") def getComponentTree(self): - router = DeviceRouter(self.context.dmd, {}); + router = DeviceRouter(self.context.dmd, {}) uid = self.context.getPrimaryId() tree = 
router.getComponentTree(uid) js = """ Zenoss.env.componentTree = %s; - """ % json.dumps(tree) + """ % json.dumps( + tree + ) return js def fetchLeftHandMenu(self): router = DetailNavRouter(self.context.dmd, {}) - menuIds = ['More','Add','TopLevel','Manage'] + menuIds = ["More", "Add", "TopLevel", "Manage"] uid = self.context.getPrimaryId() response = router.getDetailNavConfigs(uid=uid, menuIds=menuIds) js = """ Zenoss.env.lefthandnav = %s; - """ % json.dumps(response.data) + """ % json.dumps( + response.data + ) return js def getInfoObject(self): info = IInfo(self.context) # links is very expensive so do not marshal that - keys = [key for key in dir(info) - if (not key.startswith('_') - and key not in ('links', 'uptime', 'events', 'deviceClass') - and not callable(getattr(info, key))) - ] + keys = [ + key + for key in dir(info) + if ( + not key.startswith("_") + and key not in ("links", "uptime", "events", "deviceClass") + and not callable(getattr(info, key)) + ) + ] secure_properties = getPasswordFields(info) data = Zuul.marshal(info, keys) maskSecureProperties(data, secure_properties) response = dict(data=data) js = """ Zenoss.env.infoObject = %s; - """ % (json.dumps(response)) + """ % ( + json.dumps(response) + ) return js diff --git a/Products/ZenUI3/browser/resources.py b/Products/ZenUI3/browser/resources.py index 8d80049def..bbcfac4c4e 100644 --- a/Products/ZenUI3/browser/resources.py +++ b/Products/ZenUI3/browser/resources.py @@ -1,41 +1,45 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - +import json import os import re -import json -import logging -from zope.interface import implements + +from zope.interface import implementer from Products.Five.browser import BrowserView from Products.Five.viewlet.manager import ViewletManagerBase -from Products.ZenUI3.browser.interfaces import IHeadExtraManager + +from .interfaces import IHeadExtraManager + def _checkForCompiledJSFile(): - COMPILED_JS_FILE = os.path.join(os.path.dirname(__file__), - 'resources/js/deploy/zenoss-compiled.js') + COMPILED_JS_FILE = os.path.join( + os.path.dirname(__file__), "resources/js/deploy/zenoss-compiled.js" + ) return os.path.exists(COMPILED_JS_FILE) -JSBFILE = os.path.join(os.path.dirname(__file__), 'zenoss.jsb2') + +JSBFILE = os.path.join(os.path.dirname(__file__), "zenoss.jsb2") COMPILED_JS_EXISTS = _checkForCompiledJSFile() + class ExtJSShortcut(BrowserView): def __getitem__(self, name): - return self.context.unrestrictedTraverse('++resource++extjs')[name] + return self.context.unrestrictedTraverse("++resource++extjs")[name] class ZenUIResourcesShortcut(BrowserView): def __getitem__(self, name): - return self.context.unrestrictedTraverse('++resource++zenui')[name] + return self.context.unrestrictedTraverse("++resource++zenui")[name] -def get_js_file_list(pkg='Zenoss Application'): +def get_js_file_list(pkg="Zenoss Application"): """ Parse the JSBuilder2 config file to get a list of file names in the same order as that used by JSBuilder to generate its version. 
@@ -44,20 +48,20 @@ def get_js_file_list(pkg='Zenoss Application'): paths = [] try: cfg = json.load(jsb) - for p in cfg['pkgs']: - if p['name']==pkg: - for f in p['fileIncludes']: - path = re.sub('^resources', 'zenui', f['path']) - paths.append(path + f['text']) + for p in cfg["pkgs"]: + if p["name"] == pkg: + for f in p["fileIncludes"]: + newpath = re.sub("^resources", "zenui", f["path"]) + paths.append(newpath + f["text"]) finally: jsb.close() - return [ str(path) for path in paths ] + return [str(path) for path in paths] class PIEdotHTC(BrowserView): def __call__(self): - self.request.response.setHeader('Content-Type', 'text/x-component') - with open(os.path.join(os.path.dirname(__file__), 'PIE.htc')) as f: + self.request.response.setHeader("Content-Type", "text/x-component") + with open(os.path.join(os.path.dirname(__file__), "PIE.htc")) as f: return f.read() @@ -67,17 +71,18 @@ class ZenossJavaScript(BrowserView): minified version of the Zenoss JavaScript, concatenates them, and returns the output. """ + def __call__(self): - self.request.response.setHeader('Content-Type', 'text/javascript') + self.request.response.setHeader("Content-Type", "text/javascript") src = [] for p in get_js_file_list(): fob = self.context.unrestrictedTraverse(p) src.append(fob.GET()) - return '\n'.join(src) + return "\n".join(src) +@implementer(IHeadExtraManager) class HeadExtraManager(ViewletManagerBase): """ Simple viewlet manager allowing people to plug into . """ - implements(IHeadExtraManager) diff --git a/Products/ZenUI3/browser/resources/js/timezone/moment-timezone-with-data.min.js b/Products/ZenUI3/browser/resources/js/timezone/moment-timezone-with-data.min.js index b2d8851ce8..540d0550a2 100644 --- a/Products/ZenUI3/browser/resources/js/timezone/moment-timezone-with-data.min.js +++ b/Products/ZenUI3/browser/resources/js/timezone/moment-timezone-with-data.min.js @@ -1,7 +1 @@ -//! moment-timezone.js -//! version : 0.4.0 -//! author : Tim Wood -//! license : MIT -//! github.com/moment/moment-timezone -!function(a,b){"use strict";"function"==typeof define&&define.amd?define(["moment"],b):"object"==typeof exports?module.exports=b(require("moment")):b(a.moment)}(this,function(a){"use strict";function b(a){return a>96?a-87:a>64?a-29:a-48}function c(a){var c,d=0,e=a.split("."),f=e[0],g=e[1]||"",h=1,i=0,j=1;for(45===a.charCodeAt(0)&&(d=1,j=-1),d;dc;c++)a[c]=Math.round((a[c-1]||0)+6e4*a[c]);a[b-1]=1/0}function f(a,b){var c,d=[];for(c=0;cz||2===z&&6>A)&&q("Moment Timezone requires Moment.js >= 2.6.0. You are using Moment.js "+a.version+". See momentjs.com"),h.prototype={_set:function(a){this.name=a.name,this.abbrs=a.abbrs,this.untils=a.untils,this.offsets=a.offsets},_index:function(a){var b,c=+a,d=this.untils;for(b=0;be;e++)if(b=g[e],c=g[e+1],d=g[e?e-1:e],c>b&&r.moveAmbiguousForward?b=c:b>d&&r.moveInvalidForward&&(b=d),fz||2===z&&9>A)&&q("Moment Timezone setDefault() requires Moment.js >= 2.9.0. 
You are using Moment.js "+a.version+"."),a.defaultZone=b?k(b):null,a};var C=a.momentProperties;return"[object Array]"===Object.prototype.toString.call(C)?(C.push("_z"),C.push("_a")):C&&(C._z=null),n({version:"2015d",zones:["Africa/Abidjan|LMT GMT|g.8 0|01|-2ldXH.Q","Africa/Accra|LMT GMT GHST|.Q 0 -k|012121212121212121212121212121212121212121212121|-26BbX.8 6tzX.8 MnE 1BAk MnE 1BAk MnE 1BAk MnE 1C0k MnE 1BAk MnE 1BAk MnE 1BAk MnE 1C0k MnE 1BAk MnE 1BAk MnE 1BAk MnE 1C0k MnE 1BAk MnE 1BAk MnE 1BAk MnE 1C0k MnE 1BAk MnE 1BAk MnE 1BAk MnE 1C0k MnE 1BAk MnE 1BAk MnE","Africa/Addis_Ababa|LMT EAT BEAT BEAUT|-2r.g -30 -2u -2J|01231|-1F3Cr.g 3Dzr.g okMu MFXJ","Africa/Algiers|PMT WET WEST CET CEST|-9.l 0 -10 -10 -20|0121212121212121343431312123431213|-2nco9.l cNb9.l HA0 19A0 1iM0 11c0 1oo0 Wo0 1rc0 QM0 1EM0 UM0 DA0 Imo0 rd0 De0 9Xz0 1fb0 1ap0 16K0 2yo0 mEp0 hwL0 jxA0 11A0 dDd0 17b0 11B0 1cN0 2Dy0 1cN0 1fB0 1cL0","Africa/Bangui|LMT WAT|-d.A -10|01|-22y0d.A","Africa/Bissau|LMT WAT GMT|12.k 10 0|012|-2ldWV.E 2xonV.E","Africa/Blantyre|LMT CAT|-2a.k -20|01|-2GJea.k","Africa/Cairo|EET EEST|-20 -30|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-1bIO0 vb0 1ip0 11z0 1iN0 1nz0 12p0 1pz0 10N0 1pz0 16p0 1jz0 s3d0 Vz0 1oN0 11b0 1oO0 10N0 1pz0 10N0 1pb0 10N0 1pb0 10N0 1pb0 10N0 1pz0 10N0 1pb0 10N0 1pb0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1WL0 rd0 1Rz0 wp0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1qL0 Xd0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1ny0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 WL0 1qN0 Rb0 1wp0 On0 1zd0 Lz0 1EN0 Fb0 c10 8n0 8Nd0 gL0 e10 mn0","Africa/Casablanca|LMT WET WEST CET|u.k 0 -10 -10|012121212121212121312121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2gMnt.E 130Lt.E rb0 Dd0 dVb0 b6p0 TX0 EoB0 LL0 gnd0 rz0 43d0 AL0 1Nd0 XX0 1Cp0 pz0 dEp0 4mn0 SyN0 AL0 1Nd0 wn0 1FB0 Db0 1zd0 Lz0 1Nf0 wM0 co0 go0 1o00 s00 dA0 vc0 11A0 A00 e00 y00 11A0 uo0 e00 DA0 11A0 rA0 e00 Jc0 WM0 m00 gM0 M00 WM0 jc0 e00 RA0 11A0 dA0 e00 Uo0 11A0 800 gM0 Xc0 11A0 5c0 e00 17A0 WM0 2o0 e00 1ao0 19A0 1g00 16M0 1iM0 1400 1lA0 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qo0 1200 1kM0 14M0 1i00","Africa/Ceuta|WET WEST CET CEST|0 -10 -10 -20|010101010101010101010232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-25KN0 11z0 drd0 18o0 3I00 17c0 1fA0 1a00 1io0 1a00 1y7p0 LL0 gnd0 rz0 43d0 AL0 1Nd0 XX0 1Cp0 pz0 dEp0 4VB0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Africa/El_Aaiun|LMT WAT WET WEST|Q.M 10 0 -10|0123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1rDz7.c 1GVA7.c 6L0 AL0 1Nd0 XX0 1Cp0 pz0 1cBB0 AL0 1Nd0 wn0 1FB0 Db0 1zd0 Lz0 1Nf0 wM0 co0 go0 1o00 s00 dA0 vc0 11A0 A00 e00 y00 11A0 uo0 e00 DA0 11A0 rA0 e00 Jc0 WM0 m00 
gM0 M00 WM0 jc0 e00 RA0 11A0 dA0 e00 Uo0 11A0 800 gM0 Xc0 11A0 5c0 e00 17A0 WM0 2o0 e00 1ao0 19A0 1g00 16M0 1iM0 1400 1lA0 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qo0 1200 1kM0 14M0 1i00","Africa/Johannesburg|SAST SAST SAST|-1u -20 -30|012121|-2GJdu 1Ajdu 1cL0 1cN0 1cL0","Africa/Juba|LMT CAT CAST EAT|-2a.8 -20 -30 -30|01212121212121212121212121212121213|-1yW2a.8 1zK0a.8 16L0 1iN0 17b0 1jd0 17b0 1ip0 17z0 1i10 17X0 1hB0 18n0 1hd0 19b0 1gp0 19z0 1iN0 17b0 1ip0 17z0 1i10 18n0 1hd0 18L0 1gN0 19b0 1gp0 19z0 1iN0 17z0 1i10 17X0 yGd0","Africa/Monrovia|MMT LRT GMT|H.8 I.u 0|012|-23Lzg.Q 29s01.m","Africa/Ndjamena|LMT WAT WAST|-10.c -10 -20|0121|-2le10.c 2J3c0.c Wn0","Africa/Tripoli|LMT CET CEST EET|-Q.I -10 -20 -20|012121213121212121212121213123123|-21JcQ.I 1hnBQ.I vx0 4iP0 xx0 4eN0 Bb0 7ip0 U0n0 A10 1db0 1cN0 1db0 1dd0 1db0 1eN0 1bb0 1e10 1cL0 1c10 1db0 1dd0 1db0 1cN0 1db0 1q10 fAn0 1ep0 1db0 AKq0 TA0 1o00","Africa/Tunis|PMT CET CEST|-9.l -10 -20|0121212121212121212121212121212121|-2nco9.l 18pa9.l 1qM0 DA0 3Tc0 11B0 1ze0 WM0 7z0 3d0 14L0 1cN0 1f90 1ar0 16J0 1gXB0 WM0 1rA0 11c0 nwo0 Ko0 1cM0 1cM0 1rA0 10M0 zuM0 10N0 1aN0 1qM0 WM0 1qM0 11A0 1o00","Africa/Windhoek|SWAT SAST SAST CAT WAT WAST|-1u -20 -30 -20 -10 -20|012134545454545454545454545454545454545454545454545454545454545454545454545454545454545454545|-2GJdu 1Ajdu 1cL0 1SqL0 9NA0 11D0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0","America/Adak|NST NWT NPT BST BDT AHST HST HDT|b0 a0 a0 b0 a0 a0 a0 90|012034343434343434343434343434343456767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-17SX0 8wW0 iB0 Qlb0 52O0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cm0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Anchorage|CAT CAWT CAPT AHST AHDT YST AKST AKDT|a0 90 90 a0 90 90 90 80|012034343434343434343434343434343456767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-17T00 8wX0 iA0 Qlb0 52O0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cm0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Anguilla|LMT AST|46.4 40|01|-2kNvR.U","America/Araguaina|LMT BRT BRST|3c.M 30 20|0121212121212121212121212121212121212121212121212121|-2glwL.c HdKL.c 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 dMN0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 ny10 Lz0","America/Argentina/Buenos_Aires|CMT ART ARST ART ARST|4g.M 40 30 30 20|0121212121212121212121212121212121212121213434343434343234343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wp0 Rb0 1wp0 TX0 g0p0 10M0 j3c0 uL0 1qN0 WL0","America/Argentina/Catamarca|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|0121212121212121212121212121212121212121213434343454343235343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 g0p0 10M0 ako0 7B0 8zb0 uL0","America/Argentina/Cordoba|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|0121212121212121212121212121212121212121213434343454343234343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 g0p0 10M0 j3c0 uL0 1qN0 WL0","America/Argentina/Jujuy|CMT ART ARST ART ARST WART WARST|4g.M 40 30 30 20 40 30|01212121212121212121212121212121212121212134343456543432343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1ze0 TX0 1ld0 WK0 1wp0 TX0 g0p0 10M0 j3c0 uL0","America/Argentina/La_Rioja|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|01212121212121212121212121212121212121212134343434534343235343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Qn0 qO0 16n0 Rb0 1wp0 TX0 g0p0 10M0 ako0 7B0 8zb0 uL0","America/Argentina/Mendoza|CMT ART ARST ART ARST WART WARST|4g.M 40 30 30 20 40 30|0121212121212121212121212121212121212121213434345656543235343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1u20 SL0 1vd0 Tb0 1wp0 TW0 g0p0 10M0 agM0 Op0 7TX0 uL0","America/Argentina/Rio_Gallegos|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|0121212121212121212121212121212121212121213434343434343235343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wp0 Rb0 1wp0 TX0 g0p0 10M0 ako0 7B0 8zb0 uL0","America/Argentina/Salta|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|01212121212121212121212121212121212121212134343434543432343|-20UHH.c pKnH.c 
Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 g0p0 10M0 j3c0 uL0","America/Argentina/San_Juan|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|01212121212121212121212121212121212121212134343434534343235343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Qn0 qO0 16n0 Rb0 1wp0 TX0 g0p0 10M0 ak00 m10 8lb0 uL0","America/Argentina/San_Luis|CMT ART ARST ART ARST WART WARST|4g.M 40 30 30 20 40 30|01212121212121212121212121212121212121212134343456536353465653|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 XX0 1q20 SL0 AN0 kin0 10M0 ak00 m10 8lb0 8L0 jd0 1qN0 WL0 1qN0","America/Argentina/Tucuman|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|012121212121212121212121212121212121212121343434345434323534343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 g0p0 10M0 ako0 4N0 8BX0 uL0 1qN0 WL0","America/Argentina/Ushuaia|CMT ART ARST ART ARST WART|4g.M 40 30 30 20 40|0121212121212121212121212121212121212121213434343434343235343|-20UHH.c pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wp0 Rb0 1wp0 TX0 g0p0 10M0 ajA0 8p0 8zb0 uL0","America/Aruba|LMT ANT AST|4z.L 4u 40|012|-2kV7o.d 28KLS.d","America/Asuncion|AMT PYT PYT PYST|3O.E 40 30 30|012131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313|-1x589.k 1DKM9.k 3CL0 3Dd0 10L0 1pB0 10n0 1pB0 10n0 1pB0 1cL0 1dd0 1db0 1dd0 1cL0 1dd0 1cL0 1dd0 1cL0 1dd0 1db0 1dd0 1cL0 1dd0 1cL0 1dd0 1cL0 1dd0 1db0 1dd0 1cL0 1lB0 14n0 1dd0 1cL0 1fd0 WL0 1rd0 1aL0 1dB0 Xz0 1qp0 Xb0 1qN0 10L0 1rB0 TX0 1tB0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 WN0 1qL0 11B0 1nX0 1ip0 WL0 1qN0 WL0 1qN0 WL0 1tB0 TX0 1tB0 TX0 1tB0 19X0 1a10 1fz0 1a10 1fz0 1cN0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0","America/Atikokan|CST CDT CWT CPT EST|60 50 50 50 50|0101234|-25TQ0 1in0 Rnb0 3je0 8x30 iw0","America/Bahia|LMT BRT BRST|2y.4 30 20|01212121212121212121212121212121212121212121212121212121212121|-2glxp.U HdLp.U 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 l5B0 Rb0","America/Bahia_Banderas|LMT MST CST PST MDT CDT|71 70 60 80 60 50|0121212131414141414141414141414141414152525252525252525252525252525252525252525252525252525252|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 otX0 gmN0 P2N0 13Vd0 1lb0 14p0 1lb0 14p0 1lb0 
14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nW0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0","America/Barbados|LMT BMT AST ADT|3W.t 3W.t 40 30|01232323232|-1Q0I1.v jsM0 1ODC1.v IL0 1ip0 17b0 1ip0 17b0 1ld0 13b0","America/Belem|LMT BRT BRST|3d.U 30 20|012121212121212121212121212121|-2glwK.4 HdKK.4 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0","America/Belize|LMT CST CHDT CDT|5Q.M 60 5u 50|01212121212121212121212121212121212121212121212121213131|-2kBu7.c fPA7.c Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1f0Mu qn0 lxB0 mn0","America/Blanc-Sablon|AST ADT AWT APT|40 30 30 30|010230|-25TS0 1in0 UGp0 8x50 iu0","America/Boa_Vista|LMT AMT AMST|42.E 40 30|0121212121212121212121212121212121|-2glvV.k HdKV.k 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 smp0 WL0 1tB0 2L0","America/Bogota|BMT COT COST|4U.g 50 40|0121|-2eb73.I 38yo3.I 2en0","America/Boise|PST PDT MST MWT MPT MDT|80 70 70 60 60 60|0101023425252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252|-261q0 1nX0 11B0 1nX0 8C10 JCL0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 Dd0 1Kn0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Cambridge_Bay|zzz MST MWT MPT MDDT MDT CST CDT EST|0 70 60 60 50 60 60 50 50|0123141515151515151515151515151515151515151515678651515151515151515151515151515151515151515151515151515151515151515151515151|-21Jc0 RO90 8x20 ix0 LCL0 1fA0 zgO0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11A0 1nX0 2K0 WQ0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Campo_Grande|LMT AMT AMST|3C.s 40 30|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-2glwl.w HdLl.w 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 
On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 1C10 Lz0 1Ip0 HX0 1zd0 On0 1HB0 IL0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10 Lz0 1C10 On0 1zd0 Rb0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0","America/Cancun|LMT CST EST EDT CDT|5L.4 60 50 40 50|0123232341414141414141414141414141414141412|-1UQG0 2q2o0 yLB0 1lb0 14p0 1lb0 14p0 Lz0 xB0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 Dd0","America/Caracas|CMT VET VET|4r.E 4u 40|0121|-2kV7w.k 28KM2.k 1IwOu","America/Cayenne|LMT GFT GFT|3t.k 40 30|012|-2mrwu.E 2gWou.E","America/Cayman|CMT EST|5j.A 50|01|-2uduE.o","America/Chicago|CST CDT EST CWT CPT|60 50 50 50 50|01010101010101010101010101010101010102010101010103401010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261s0 1nX0 11B0 1nX0 1wp0 TX0 WN0 1qL0 1cN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 11B0 1Hz0 14p0 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 RB0 8x30 iw0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Chihuahua|LMT MST CST CDT MDT|74.k 70 60 50 60|0121212323241414141414141414141414141414141414141414141414141414141414141414141414141414141|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 2zQN0 1lb0 14p0 1lb0 14q0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0","America/Costa_Rica|SJMT CST CDT|5A.d 60 50|0121212121|-1Xd6n.L 2lu0n.L Db0 1Kp0 Db0 pRB0 15b0 1kp0 mL0","America/Creston|MST PST|70 80|010|-29DR0 43B0","America/Cuiaba|LMT AMT AMST|3I.k 40 30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-2glwf.E HdLf.E 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 4a10 HX0 1zd0 On0 1HB0 IL0 1wp0 On0 1C10 Lz0 1C10 
On0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10 Lz0 1C10 On0 1zd0 Rb0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0","America/Danmarkshavn|LMT WGT WGST GMT|1e.E 30 20 0|01212121212121212121212121212121213|-2a5WJ.k 2z5fJ.k 19U0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 DC0","America/Dawson|YST YDT YWT YPT YDDT PST PDT|90 80 80 80 70 80 70|0101023040565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-25TN0 1in0 1o10 13V0 Ser0 8x00 iz0 LCL0 1fA0 jrA0 fNd0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Dawson_Creek|PST PDT PWT PPT MST|80 70 70 70 70|0102301010101010101010101010101010101010101010101010101014|-25TO0 1in0 UGp0 8x10 iy0 3NB0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 ML0","America/Denver|MST MDT MWT MPT|70 60 60 60|01010101023010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261r0 1nX0 11B0 1nX0 11B0 1qL0 WN0 mn0 Ord0 8x20 ix0 LCN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Detroit|LMT CST EST EWT EPT EDT|5w.b 60 50 40 40 40|01234252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252|-2Cgir.N peqr.N 156L0 8x40 iv0 6fd0 11z0 Jy10 SL0 dnB0 1cL0 s10 1Vz0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0","America/Edmonton|LMT MST MDT MWT MPT|7x.Q 70 60 60 60|01212121212121341212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2yd4q.8 shdq.8 1in0 17d0 hz0 2dB0 1fz0 1a10 11z0 1qN0 WL0 1qN0 11z0 IGN0 8x20 ix0 3NB0 11z0 LFB0 1cL0 3Cp0 1cL0 66N0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Eirunepe|LMT ACT ACST AMT|4D.s 50 40 40|0121212121212121212121212121212131|-2glvk.w HdLk.w 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 dPB0 On0 yTd0 d5X0","America/El_Salvador|LMT CST CDT|5U.M 60 50|012121|-1XiG3.c 2Fvc3.c WL0 1qN0 WL0","America/Ensenada|LMT MST PST PDT PWT PPT|7M.4 70 80 70 70 70|012123245232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1UQE0 4PX0 8mM0 8lc0 SN0 1cL0 pHB0 83r0 zI0 5O10 1Rz0 cOP0 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 BUp0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Fort_Wayne|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|010101023010101010101010101040454545454545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 QI10 Db0 RB0 8x30 iw0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 5Tz0 1o10 qLb0 1cL0 1cN0 1cL0 1qhd0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Fortaleza|LMT BRT BRST|2y 30 20|0121212121212121212121212121212121212121|-2glxq HdLq 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 nsp0 WL0 1tB0 5z0 2mN0 On0","America/Glace_Bay|LMT AST ADT AWT APT|3X.M 40 30 30 30|012134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2IsI0.c CwO0.c 1in0 UGp0 8x50 iu0 iq10 11z0 Jg10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 
14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Godthab|LMT WGT WGST|3q.U 30 20|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2a5Ux.4 2z5dx.4 19U0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","America/Goose_Bay|NST NDT NST NDT NWT NPT AST ADT ADDT|3u.Q 2u.Q 3u 2u 2u 2u 40 30 20|010232323232323245232323232323232323232323232323232323232326767676767676767676767676767676767676767676768676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-25TSt.8 1in0 DXb0 2HbX.8 WL0 1qN0 WL0 1qN0 WL0 1tB0 TX0 1tB0 WL0 1qN0 WL0 1qN0 7UHu itu 1tB0 WL0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1tB0 WL0 1ld0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 S10 g0u 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14n1 1lb0 14p0 1nW0 11C0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zcX Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Grand_Turk|KMT EST EDT AST|57.b 50 40 40|0121212121212121212121212121212121212121212121212121212121212121212121212123|-2l1uQ.N 2HHBQ.N 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Guatemala|LMT CST CDT|62.4 60 50|0121212121|-24KhV.U 2efXV.U An0 mtd0 Nz0 ifB0 17b0 zDB0 11z0","America/Guayaquil|QMT ECT|5e 50|01|-1yVSK","America/Guyana|LMT GBGT GYT GYT GYT|3Q.E 3J 3J 30 40|01234|-2dvU7.k 24JzQ.k mlc0 Bxbf","America/Halifax|LMT AST ADT AWT APT|4e.o 40 30 30 30|0121212121212121212121212121212121212121212121212134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2IsHJ.A xzzJ.A 1db0 3I30 1in0 3HX0 IL0 1E10 ML0 1yN0 Pb0 1Bd0 Mn0 1Bd0 Rz0 1w10 Xb0 1w10 LX0 1w10 Xb0 1w10 
Lz0 1C10 Jz0 1E10 OL0 1yN0 Un0 1qp0 Xb0 1qp0 11X0 1w10 Lz0 1HB0 LX0 1C10 FX0 1w10 Xb0 1qp0 Xb0 1BB0 LX0 1td0 Xb0 1qp0 Xb0 Rf0 8x50 iu0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 3Qp0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 3Qp0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 6i10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Havana|HMT CST CDT|5t.A 50 40|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1Meuu.o 72zu.o ML0 sld0 An0 1Nd0 Db0 1Nd0 An0 6Ep0 An0 1Nd0 An0 JDd0 Mn0 1Ap0 On0 1fd0 11X0 1qN0 WL0 1wp0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 14n0 1ld0 14L0 1kN0 15b0 1kp0 1cL0 1cN0 1fz0 1a10 1fz0 1fB0 11z0 14p0 1nX0 11B0 1nX0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 1a10 1in0 1a10 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 17c0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 11A0 6i00 Rc0 1wo0 U00 1tA0 Rc0 1wo0 U00 1wo0 U00 1zc0 U00 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0","America/Hermosillo|LMT MST CST PST MDT|7n.Q 70 60 80 60|0121212131414141|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 otX0 gmN0 P2N0 13Vd0 1lb0 14p0 1lb0 14p0 1lb0","America/Indiana/Knox|CST CDT CWT CPT EST|60 50 50 50 50|0101023010101010101010101010101010101040101010101010101010101010101010101010101010101010141010101010101010101010101010101010101010101010101010101010101010|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 3NB0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 11z0 1o10 11z0 1o10 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 3Cn0 8wp0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 z8o0 1o00 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Indiana/Marengo|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|0101023010101010101010104545454545414545454545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 dyN0 11z0 6fd0 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 jrz0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1VA0 LA0 1BX0 1e6p0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Indiana/Petersburg|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|01010230101010101010101010104010101010101010101010141014545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 njX0 WN0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 3Fb0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 19co0 1o00 Rd0 1zb0 Oo0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Indiana/Tell_City|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|01010230101010101010101010101010454541010101010101010101010101010101010101010101010101010101010101010|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 g0p0 11z0 1o10 11z0 1qL0 WN0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 caL0 1cL0 1cN0 1cL0 1qhd0 1o00 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Indiana/Vevay|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|010102304545454545454545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 kPB0 Awn0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1lnd0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Indiana/Vincennes|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|01010230101010101010101010101010454541014545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 g0p0 11z0 1o10 11z0 1qL0 WN0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 caL0 1cL0 1cN0 1cL0 1qhd0 1o00 Rd0 1zb0 Oo0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Indiana/Winamac|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|01010230101010101010101010101010101010454541054545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 jrz0 1cL0 1cN0 1cL0 1qhd0 1o00 Rd0 1za0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Inuvik|zzz PST PDDT MST MDT|0 80 60 70 60|0121343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-FnA0 tWU0 1fA0 wPe0 2pz0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 
1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Iqaluit|zzz EWT EPT EST EDDT EDT CST CDT|0 40 40 50 30 40 60 50|01234353535353535353535353535353535353535353567353535353535353535353535353535353535353535353535353535353535353535353535353|-16K00 7nX0 iv0 LCL0 1fA0 zgO0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11C0 1nX0 11A0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Jamaica|KMT EST EDT|57.b 50 40|0121212121212121212121|-2l1uQ.N 2uM1Q.N 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0","America/Juneau|PST PWT PPT PDT YDT YST AKST AKDT|80 70 70 70 80 90 90 80|01203030303030303030303030403030356767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-17T20 8x10 iy0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cM0 1cM0 1cL0 1cN0 1fz0 1a10 1fz0 co0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Kentucky/Louisville|CST CDT CWT CPT EST EDT|60 50 50 50 50 40|0101010102301010101010101010101010101454545454545414545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 3Fd0 Nb0 LPd0 11z0 RB0 8x30 iw0 Bb0 10N0 2bB0 8in0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 xz0 gso0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1VA0 LA0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Kentucky/Monticello|CST CDT CWT CPT EST EDT|60 50 50 50 50 
40|0101023010101010101010101010101010101010101010101010101010101010101010101454545454545454545454545454545454545454545454545454545454545454545454545454|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 SWp0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11A0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/La_Paz|CMT BOST BOT|4w.A 3w.A 40|012|-1x37r.o 13b0","America/Lima|LMT PET PEST|58.A 50 40|0121212121212121|-2tyGP.o 1bDzP.o zX0 1aN0 1cL0 1cN0 1cL0 1PrB0 zX0 1O10 zX0 6Gp0 zX0 98p0 zX0","America/Los_Angeles|PST PDT PWT PPT|80 70 70 70|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261q0 1nX0 11B0 1nX0 SgN0 8x10 iy0 5Wp0 1Vb0 3dB0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Maceio|LMT BRT BRST|2m.Q 30 20|012121212121212121212121212121212121212121|-2glxB.8 HdLB.8 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 dMN0 Lz0 8Q10 WL0 1tB0 5z0 2mN0 On0","America/Managua|MMT CST EST CDT|5J.c 60 50 50|0121313121213131|-1quie.M 1yAMe.M 4mn0 9Up0 Dz0 1K10 Dz0 s3F0 1KH0 DB0 9In0 k8p0 19X0 1o30 11y0","America/Manaus|LMT AMT AMST|40.4 40 30|01212121212121212121212121212121|-2glvX.U HdKX.U 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 dPB0 On0","America/Martinique|FFMT AST ADT|44.k 40 30|0121|-2mPTT.E 2LPbT.E 19X0","America/Matamoros|LMT CST CDT|6E 60 50|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1UQG0 2FjC0 1nX0 i6p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Mazatlan|LMT MST CST PST MDT|75.E 70 60 80 
60|0121212131414141414141414141414141414141414141414141414141414141414141414141414141414141414141|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 otX0 gmN0 P2N0 13Vd0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0","America/Menominee|CST CDT CWT CPT EST|60 50 50 50 50|01010230101041010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 LCN0 1fz0 6410 9Jb0 1cM0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Merida|LMT CST EST CDT|5W.s 60 50 50|0121313131313131313131313131313131313131313131313131313131313131313131313131313131313131|-1UQG0 2q2o0 2hz0 wu30 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0","America/Metlakatla|PST PWT PPT PDT|80 70 70 70|0120303030303030303030303030303030|-17T20 8x10 iy0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0","America/Mexico_City|LMT MST CST CDT CWT|6A.A 70 60 50 50|012121232324232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 gEn0 TX0 3xd0 Jb0 6zB0 SL0 e5d0 17b0 1Pff0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0","America/Miquelon|LMT AST PMST PMDT|3I.E 40 30 20|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-2mKkf.k 2LTAf.k gQ10 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0","America/Moncton|EST AST ADT AWT APT|50 40 30 30 30|012121212121212121212134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2IsH0 CwN0 1in0 zAo0 An0 1Nd0 An0 1Nd0 An0 1Nd0 An0 1Nd0 An0 1Nd0 An0 1K10 Lz0 1zB0 NX0 1u10 Wn0 S20 8x50 iu0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 3Cp0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14n1 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 ReX 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Monterrey|LMT CST CDT|6F.g 60 50|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1UQG0 2FjC0 1nX0 i6p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0","America/Montevideo|MMT UYT UYHST UYST UYT UYHST|3I.I 3u 30 20 30 2u|012121212121212121212121213434343434345454543453434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-20UIf.g 8jzJ.g 1cLu 1dcu 1cLu 1dcu 1cLu ircu 11zu 1o0u 11zu 1o0u 11zu 1qMu WLu 1qMu WLu 1qMu WLu 1qMu 11zu 1o0u 11zu NAu 11bu 2iMu zWu Dq10 19X0 pd0 jz0 cm10 19X0 1fB0 1on0 11d0 1oL0 1nB0 1fzu 1aou 1fzu 1aou 1fzu 3nAu Jb0 3MN0 1SLu 4jzu 2PB0 Lb0 3Dd0 1pb0 ixd0 An0 1MN0 An0 1wp0 On0 1wp0 Rb0 1zd0 On0 1wp0 Rb0 s8p0 1fB0 1ip0 11z0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10","America/Montreal|EST EDT EWT EPT|50 40 40 40|01010101010101010101010101010101010101010101012301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-25TR0 1in0 11Wu 1nzu 1fD0 WJ0 1wr0 Nb0 1Ap0 On0 1zd0 On0 1wp0 TX0 1tB0 TX0 1tB0 TX0 1tB0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 4kM0 8x40 iv0 1o10 11z0 1nX0 11z0 1o10 11z0 1o10 1qL0 11D0 1nX0 11B0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 
1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Nassau|LMT EST EDT|59.u 50 40|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2kNuO.u 26XdO.u 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/New_York|EST EDT EWT EPT|50 40 40 40|01010101010101010101010101010101010101010101010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261t0 1nX0 11B0 1nX0 11B0 1qL0 1a10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 RB0 8x40 iv0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Nipigon|EST EDT EWT EPT|50 40 40 40|010123010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-25TR0 1in0 Rnb0 3je0 8x40 iv0 19yN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Nome|NST NWT NPT BST BDT YST AKST AKDT|b0 a0 a0 b0 a0 90 90 80|012034343434343434343434343434343456767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-17SX0 8wW0 iB0 Qlb0 52O0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cl0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Noronha|LMT FNT FNST|29.E 20 10|0121212121212121212121212121212121212121|-2glxO.k HdKO.k 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 nsp0 WL0 1tB0 2L0 2pB0 On0","America/North_Dakota/Beulah|MST MDT MWT MPT CST CDT|70 60 60 60 60 50|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101014545454545454545454545454545454545454545454545454545454|-261r0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Oo0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/North_Dakota/Center|MST MDT MWT MPT CST CDT|70 60 60 60 60 50|010102301010101010101010101010101010101010101010101010101014545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-261r0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14o0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/North_Dakota/New_Salem|MST MDT MWT MPT CST CDT|70 60 60 60 60 50|010102301010101010101010101010101010101010101010101010101010101010101010101010101454545454545454545454545454545454545454545454545454545454545454545454|-261r0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 
1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14o0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Ojinaga|LMT MST CST CDT MDT|6V.E 70 60 50 60|0121212323241414141414141414141414141414141414141414141414141414141414141414141414141414141|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 2zQN0 1lb0 14p0 1lb0 14q0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Pangnirtung|zzz AST AWT APT ADDT ADT EDT EST CST CDT|0 40 30 30 20 30 40 50 60 50|012314151515151515151515151515151515167676767689767676767676767676767676767676767676767676767676767676767676767676767676767|-1XiM0 PnG0 8x50 iu0 LCL0 1fA0 zgO0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1o00 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11C0 1nX0 11A0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Paramaribo|LMT PMT PMT NEGT SRT SRT|3E.E 3E.Q 3E.A 3u 3u 30|012345|-2nDUj.k Wqo0.c qanX.I 1dmLN.o lzc0","America/Phoenix|MST MDT MWT|70 60 60|01010202010|-261r0 1nX0 11B0 1nX0 SgN0 4Al1 Ap0 1db0 SWqX 1cL0","America/Port-au-Prince|PPMT EST EDT|4N 50 40|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-28RHb 2FnMb 19X0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14q0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 i6n0 1nX0 11B0 1nX0 d430 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Porto_Acre|LMT ACT ACST AMT|4v.c 50 40 40|01212121212121212121212121212131|-2glvs.M HdLs.M 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 NBd0 d5X0","America/Porto_Velho|LMT AMT AMST|4f.A 40 30|012121212121212121212121212121|-2glvI.o HdKI.o 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0","America/Puerto_Rico|AST AWT APT|40 30 30|0120|-17lU0 7XT0 iu0","America/Rainy_River|CST CDT CWT CPT|60 50 50 50|010123010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-25TQ0 1in0 Rnb0 3je0 8x30 iw0 19yN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 
14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Rankin_Inlet|zzz CST CDDT CDT EST|0 60 40 50 50|012131313131313131313131313131313131313131313431313131313131313131313131313131313131313131313131313131313131313131313131|-vDc0 keu0 1fA0 zgO0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Recife|LMT BRT BRST|2j.A 30 20|0121212121212121212121212121212121212121|-2glxE.o HdLE.o 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 nsp0 WL0 1tB0 2L0 2pB0 On0","America/Regina|LMT MST MDT MWT MPT CST|6W.A 70 60 60 60 60|012121212121212121212121341212121212121212121212121215|-2AD51.o uHe1.o 1in0 s2L0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 66N0 1cL0 1cN0 19X0 1fB0 1cL0 1fB0 1cL0 1cN0 1cL0 M30 8x20 ix0 1ip0 1cL0 1ip0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 3NB0 1cL0 1cN0","America/Resolute|zzz CST CDDT CDT EST|0 60 40 50 50|012131313131313131313131313131313131313131313431313131313431313131313131313131313131313131313131313131313131313131313131|-SnA0 GWS0 1fA0 zgO0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Santa_Isabel|LMT MST PST PDT PWT PPT|7D.s 70 80 70 70 70|012123245232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1UQE0 4PX0 8mM0 8lc0 SN0 1cL0 pHB0 83r0 zI0 5O10 1Rz0 cOP0 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 BUp0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 
14p0 1lb0","America/Santarem|LMT AMT AMST BRT|3C.M 40 30 30|0121212121212121212121212121213|-2glwl.c HdLl.c 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 NBd0","America/Santiago|SMT CLT CLT CLST CLST CLT|4G.K 50 40 40 30 30|01020313131313121242124242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424245|-2q2jh.e fJAh.e 5knG.K 1Vzh.e jRAG.K 1pbh.e 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 nHX0 op0 9Bz0 jb0 1oN0 ko0 Qeo0 WL0 1zd0 On0 1ip0 11z0 1o10 11z0 1qN0 WL0 1ld0 14n0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 1wn0","America/Santo_Domingo|SDMT EST EDT EHDT AST|4E 50 40 4u 40|01213131313131414|-1ttjk 1lJMk Mn0 6sp0 Lbu 1Cou yLu 1RAu wLu 1QMu xzu 1Q0u xXu 1PAu 13jB0 e00","America/Sao_Paulo|LMT BRT BRST|36.s 30 20|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-2glwR.w HdKR.w 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 pTd0 PX0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 1C10 Lz0 1Ip0 HX0 1zd0 On0 1HB0 IL0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10 Lz0 1C10 On0 1zd0 Rb0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0","America/Scoresbysund|LMT CGT CGST EGST EGT|1r.Q 20 10 0 10|0121343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-2a5Ww.8 2z5ew.8 1a00 1cK0 1cL0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","America/Sitka|PST PWT PPT PDT YST AKST AKDT|80 70 70 70 90 90 80|01203030303030303030303030303030345656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-17T20 8x10 iy0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 co0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0","America/St_Johns|NST NDT NST NDT NWT NPT NDDT|3u.Q 2u.Q 3u 2u 2u 2u 1u|01010101010101010101010101010101010102323232323232324523232323232323232323232323232323232323232323232323232323232323232323232323232323232326232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-28oit.8 14L0 1nB0 1in0 1gm0 Dz0 1JB0 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 1cL0 1fB0 19X0 1fB0 19X0 10O0 eKX.8 19X0 1iq0 WL0 1qN0 WL0 1qN0 WL0 1tB0 TX0 1tB0 WL0 1qN0 WL0 1qN0 7UHu itu 1tB0 WL0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1tB0 WL0 1ld0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14n1 1lb0 14p0 1nW0 11C0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zcX Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Swift_Current|LMT MST MDT MWT MPT CST|7b.k 70 60 60 60 60|012134121212121212121215|-2AD4M.E uHdM.E 1in0 UGp0 8x20 ix0 1o10 17b0 1ip0 11z0 1o10 11z0 1o10 11z0 isN0 1cL0 3Cp0 1cL0 1cN0 11z0 1qN0 WL0 pMp0","America/Tegucigalpa|LMT CST CDT|5M.Q 60 50|01212121|-1WGGb.8 2ETcb.8 WL0 1qN0 WL0 GRd0 AL0","America/Thule|LMT AST ADT|4z.8 40 30|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2a5To.Q 31NBo.Q 1cL0 1cN0 1cL0 1fB0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Thunder_Bay|CST EST EWT EPT EDT|60 50 40 40 40|0123141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141|-2q5S0 1iaN0 8x40 iv0 XNB0 1cL0 1cN0 1fz0 1cN0 1cL0 3Cp0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Vancouver|PST PDT PWT PPT|80 70 70 70|0102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-25TO0 1in0 UGp0 8x10 iy0 1o10 17b0 1ip0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 
1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Whitehorse|YST YDT YWT YPT YDDT PST PDT|90 80 80 80 70 80 70|0101023040565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-25TN0 1in0 1o10 13V0 Ser0 8x00 iz0 LCL0 1fA0 3NA0 vrd0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Winnipeg|CST CDT CWT CPT|60 50 50 50|010101023010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aIi0 WL0 3ND0 1in0 Jap0 Rb0 aCN0 8x30 iw0 1tB0 11z0 1ip0 11z0 1o10 11z0 1o10 11z0 1rd0 10L0 1op0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 1cL0 1cN0 11z0 6i10 WL0 6i10 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1a00 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1a00 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Yakutat|YST YWT YPT YDT AKST AKDT|90 80 80 80 90 80|01203030303030303030303030303030304545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-17T10 8x00 iz0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cn0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","America/Yellowknife|zzz MST MWT MPT MDDT MDT|0 70 60 60 50 60|012314151515151515151515151515151515151515151515151515151515151515151515151515151515151515151515151515151515151515151515151|-1pdA0 hix0 8x20 ix0 LCL0 1fA0 zgO0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","Antarctica/Casey|zzz AWST CAST|0 -80 -b0|012121|-2q00 1DjS0 T90 40P0 KL0","Antarctica/Davis|zzz DAVT DAVT|0 -70 -50|01012121|-vyo0 iXt0 alj0 1D7v0 VB0 3Wn0 KN0","Antarctica/DumontDUrville|zzz PMT DDUT|0 -a0 -a0|0102|-U0o0 cfq0 bFm0","Antarctica/Macquarie|AEST AEDT zzz MIST|-a0 -b0 0 -b0|0102010101010101010101010101010101010101010101010101010101010101010101010101010101010101013|-29E80 19X0 4SL0 1ayy0 Lvs0 1cM0 1o00 Rc0 1wo0 Rc0 1wo0 U00 1wo0 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1qM0 WM0 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1wo0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 11A0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 11A0 1o00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1cM0 1cM0 1cM0","Antarctica/Mawson|zzz MAWT MAWT|0 -60 -50|012|-CEo0 2fyk0","Antarctica/McMurdo|NZMT NZST NZST NZDT|-bu -cu -c0 -d0|01020202020202020202020202023232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323|-1GCVu Lz0 1tB0 11zu 1o0u 11zu 1o0u 11zu 1o0u 14nu 1lcu 14nu 1lcu 1lbu 11Au 1nXu 11Au 1nXu 11Au 1nXu 11Au 1nXu 11Au 1qLu WMu 1qLu 11Au 1n1bu IM0 1C00 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1qM0 14o0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1io0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00","Antarctica/Palmer|zzz ARST ART ART ARST CLT CLST CLT|0 30 40 30 20 40 30 30|012121212123435656565656565656565656565656565656565656565656565656565656565656567|-cao0 nD0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 jsN0 14N0 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 1wn0","Antarctica/Rothera|zzz ROTT|0 30|01|gOo0","Antarctica/Syowa|zzz SYOT|0 -30|01|-vs00","Antarctica/Troll|zzz UTC CEST|0 0 -20|01212121212121212121212121212121212121212121212121212121212121212121|1puo0 hd0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Antarctica/Vostok|zzz VOST|0 -60|01|-tjA0","Arctic/Longyearbyen|CET CEST|-10 -20|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2awM0 Qm0 W6o0 5pf0 WM0 1fA0 1cM0 1cM0 1cM0 1cM0 wJc0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1qM0 WM0 zpc0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Asia/Aden|LMT AST|-36.Q -30|01|-TvD6.Q","Asia/Almaty|LMT ALMT ALMT ALMST|-57.M -50 -60 -70|0123232323232323232323232323232323232323232323232|-1Pc57.M eUo7.M 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 3Cl0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0","Asia/Amman|LMT EET EEST|-2n.I -20 -30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1yW2n.I 1HiMn.I KL0 1oN0 11b0 1oN0 11b0 1pd0 1dz0 1cp0 11b0 1op0 11b0 fO10 1db0 1e10 1cL0 1cN0 1cL0 1cN0 1fz0 1pd0 10n0 1ld0 14n0 1hB0 15b0 1ip0 19X0 1cN0 1cL0 1cN0 17b0 1ld0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1So0 y00 1fc0 1dc0 1co0 1dc0 1cM0 1cM0 1cM0 1o00 11A0 1lc0 17c0 1cM0 1cM0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 4bX0 Dd0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0","Asia/Anadyr|LMT ANAT ANAT ANAST ANAST ANAST ANAT|-bN.U -c0 -d0 -e0 -d0 -c0 -b0|01232414141414141414141561414141414141414141414141414141414141561|-1PcbN.U eUnN.U 23CL0 1db0 1cN0 1dc0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qN0 WM0","Asia/Aqtau|LMT FORT FORT SHET SHET SHEST AQTT AQTST AQTST AQTT|-3l.4 -40 -50 -50 -60 -60 -50 -60 -50 -40|012345353535353535353536767676898989898989898989896|-1Pc3l.4 eUnl.4 1jcL0 JDc0 1cL0 1dc0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2UK0 Fz0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cN0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 RW0","Asia/Aqtobe|LMT AKTT AKTT AKTST AKTT AQTT AQTST|-3M.E -40 -50 -60 -60 -50 -60|01234323232323232323232565656565656565656565656565|-1Pc3M.E eUnM.E 23CL0 1db0 1cM0 1dc0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2UK0 Fz0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0","Asia/Ashgabat|LMT ASHT ASHT ASHST ASHST TMT TMT|-3R.w -40 -50 -60 
-50 -40 -50|012323232323232323232324156|-1Pc3R.w eUnR.w 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 ba0 xC0","Asia/Baghdad|BMT AST ADT|-2V.A -30 -40|012121212121212121212121212121212121212121212121212121|-26BeV.A 2ACnV.A 11b0 1cp0 1dz0 1dd0 1db0 1cN0 1cp0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1de0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0","Asia/Bahrain|LMT GST AST|-3q.8 -40 -30|012|-21Jfq.8 27BXq.8","Asia/Baku|LMT BAKT BAKT BAKST BAKST AZST AZT AZT AZST|-3j.o -30 -40 -50 -40 -40 -30 -40 -50|0123232323232323232323245657878787878787878787878787878787878787878787878787878787878787878787878787878787878787|-1Pc3j.o 1jUoj.o WCL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 10K0 c30 1cJ0 1cL0 8wu0 1o00 11z0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Asia/Bangkok|BMT ICT|-6G.4 -70|01|-218SG.4","Asia/Beirut|EET EEST|-20 -30|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-21aq0 1on0 1410 1db0 19B0 1in0 1ip0 WL0 1lQp0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 11b0 q6N0 En0 1oN0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 11b0 1op0 11b0 dA10 17b0 1iN0 17b0 1iN0 17b0 1iN0 17b0 1vB0 SL0 1mp0 13z0 1iN0 17b0 1iN0 17b0 1jd0 12n0 1a10 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0","Asia/Bishkek|LMT FRUT FRUT FRUST FRUST KGT KGST KGT|-4W.o -50 -60 -70 -60 -50 -60 -60|01232323232323232323232456565656565656565656565656567|-1Pc4W.o eUnW.o 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 11c0 1tX0 17b0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1cPu 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 T8u","Asia/Brunei|LMT BNT BNT|-7D.E -7u -80|012|-1KITD.E gDc9.E","Asia/Calcutta|HMT BURT IST IST|-5R.k -6u -5u -6u|01232|-18LFR.k 1unn.k HB0 7zX0","Asia/Chita|LMT YAKT YAKT YAKST YAKST YAKT IRKT|-7x.Q -80 -90 -a0 -90 -a0 -80|012323232323232323232324123232323232323232323232323232323232323256|-21Q7x.Q pAnx.Q 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Choibalsan|LMT ULAT ULAT CHOST CHOT CHOT CHOST|-7C -70 -80 -a0 -90 -80 -90|0123434343434343434343434343434343434343434343456565656565656565656565656565656565656565656565|-2APHC 2UkoC cKn0 1da0 1dd0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 
1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 6hD0 11z0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 3Db0 h1f0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0","Asia/Chongqing|CST CDT|-80 -90|01010101010101010|-1c1I0 LX0 16p0 1jz0 1Myp0 Rb0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0","Asia/Colombo|MMT IST IHST IST LKT LKT|-5j.w -5u -60 -6u -6u -60|01231451|-2zOtj.w 1rFbN.w 1zzu 7Apu 23dz0 11zu n3cu","Asia/Dacca|HMT BURT IST DACT BDT BDST|-5R.k -6u -5u -60 -60 -70|01213454|-18LFR.k 1unn.k HB0 m6n0 LqMu 1x6n0 1i00","Asia/Damascus|LMT EET EEST|-2p.c -20 -30|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-21Jep.c Hep.c 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1xRB0 11X0 1oN0 10L0 1pB0 11b0 1oN0 10L0 1mp0 13X0 1oN0 11b0 1pd0 11b0 1oN0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 Nb0 1AN0 Nb0 bcp0 19X0 1gp0 19X0 3ld0 1xX0 Vd0 1Bz0 Sp0 1vX0 10p0 1dz0 1cN0 1cL0 1db0 1db0 1g10 1an0 1ap0 1db0 1fd0 1db0 1cN0 1db0 1dd0 1db0 1cp0 1dz0 1c10 1dX0 1cN0 1db0 1dd0 1db0 1cN0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1db0 1cN0 1db0 1cN0 19z0 1fB0 1qL0 11B0 1on0 Wp0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0","Asia/Dili|LMT TLT JST TLT WITA|-8m.k -80 -90 -90 -80|012343|-2le8m.k 1dnXm.k 8HA0 1ew00 Xld0","Asia/Dubai|LMT GST|-3F.c -40|01|-21JfF.c","Asia/Dushanbe|LMT DUST DUST DUSST DUSST TJT|-4z.c -50 -60 -70 -60 -50|0123232323232323232323245|-1Pc4z.c eUnz.c 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 14N0","Asia/Gaza|EET EET EEST IST IDT|-20 -30 -30 -20 -30|010101010102020202020202020202023434343434343434343434343430202020202020202020202020202020202020202020202020202020202020202020202020202020202020|-1c2q0 5Rb0 10r0 1px0 10N0 1pz0 16p0 1jB0 16p0 1jx0 pBd0 Vz0 1oN0 11b0 1oO0 10N0 1pz0 10N0 1pb0 10N0 1pb0 10N0 1pb0 10N0 1pz0 10N0 1pb0 10N0 1pb0 11d0 1oL0 dW0 hfB0 Db0 1fB0 Rb0 npB0 11z0 1C10 IL0 1s10 10n0 1o10 WL0 1zd0 On0 1ld0 11z0 1o10 14n0 1o10 14n0 1nd0 12n0 1nd0 Xz0 1q10 12n0 M10 C00 17c0 1io0 17c0 1io0 17c0 1o00 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 17c0 1io0 18N0 1bz0 19z0 1gp0 1610 1iL0 11z0 1o10 14o0 1lA1 SKX 1xd1 MKX 1AN0 1a00 1fA0 1cL0 1cN0 1nX0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0","Asia/Hebron|EET EET EEST IST IDT|-20 -30 -30 -20 -30|01010101010202020202020202020202343434343434343434343434343020202020202020202020202020202020202020202020202020202020202020202020202020202020202020|-1c2q0 5Rb0 10r0 1px0 10N0 1pz0 16p0 1jB0 16p0 1jx0 pBd0 Vz0 1oN0 11b0 1oO0 10N0 1pz0 10N0 1pb0 10N0 1pb0 10N0 1pb0 10N0 1pz0 10N0 1pb0 10N0 1pb0 11d0 1oL0 dW0 hfB0 Db0 1fB0 Rb0 npB0 11z0 1C10 IL0 1s10 10n0 1o10 WL0 1zd0 On0 1ld0 11z0 1o10 14n0 1o10 14n0 1nd0 12n0 1nd0 Xz0 1q10 12n0 M10 C00 17c0 1io0 17c0 1io0 17c0 1o00 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 17c0 1io0 18N0 1bz0 19z0 1gp0 1610 
1iL0 12L0 1mN0 14o0 1lc0 Tb0 1xd1 MKX bB0 cn0 1cN0 1a00 1fA0 1cL0 1cN0 1nX0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 1210 1nz0 14N0 1nz0 1210 1nz0 1210 1nz0 1210 1nz0","Asia/Ho_Chi_Minh|LMT PLMT ICT IDT JST|-76.E -76.u -70 -80 -90|0123423232|-2yC76.E bK00.a 1h7b6.u 5lz0 18o0 3Oq0 k5b0 aW00 BAM0","Asia/Hong_Kong|LMT HKT HKST JST|-7A.G -80 -90 -90|0121312121212121212121212121212121212121212121212121212121212121212121|-2CFHA.G 1sEP6.G 1cL0 ylu 93X0 1qQu 1tX0 Rd0 1In0 NB0 1cL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1kL0 14N0 1nX0 U10 1tz0 U10 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 U10 1tz0 U10 1wn0 Rd0 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 17d0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 s10 1Vz0 1cN0 1cL0 1cN0 1cL0 6fd0 14n0","Asia/Hovd|LMT HOVT HOVT HOVST|-66.A -60 -70 -80|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-2APG6.A 2Uko6.A cKn0 1db0 1dd0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 6hD0 11z0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 kEp0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0","Asia/Irkutsk|IMT IRKT IRKT IRKST IRKST IRKT|-6V.5 -70 -80 -90 -80 -90|012323232323232323232324123232323232323232323232323232323232323252|-21zGV.5 pjXV.5 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Istanbul|IMT EET EEST TRST TRT|-1U.U -20 -30 -40 -30|012121212121212121212121212121212121212121212121212121234343434342121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2ogNU.U dzzU.U 11b0 8tB0 1on0 1410 1db0 19B0 1in0 3Rd0 Un0 1oN0 11b0 zSp0 CL0 mN0 1Vz0 1gN0 1pz0 5Rd0 1fz0 1yp0 ML0 1kp0 17b0 1ip0 17b0 1fB0 19X0 1jB0 18L0 1ip0 17z0 qdd0 xX0 3S10 Tz0 dA10 11z0 1o10 11z0 1qN0 11z0 1ze0 11B0 WM0 1qO0 WI0 1nX0 1rB0 10L0 11B0 1in0 17d0 1in0 2pX0 19E0 1fU0 16Q0 1iI0 16Q0 1iI0 1Vd0 pb0 3Kp0 14o0 1df0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cL0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WO0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 Xc0 1qo0 WM0 1qM0 11A0 1o00 1200 1nA0 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Asia/Jakarta|BMT JAVT WIB JST WIB WIB|-77.c -7k -7u -90 -80 -70|01232425|-1Q0Tk luM0 mPzO 8vWu 6kpu 4PXu xhcu","Asia/Jayapura|LMT WIT ACST|-9m.M -90 -9u|0121|-1uu9m.M sMMm.M L4nu","Asia/Jerusalem|JMT IST IDT IDDT|-2k.E -20 -30 -40|01212121212132121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-26Bek.E SyMk.E 5Rb0 10r0 1px0 10N0 1pz0 16p0 1jB0 16p0 1jx0 3LB0 Em0 or0 1cn0 1dB0 16n0 10O0 1ja0 1tC0 14o0 1cM0 1a00 11A0 1Na0 An0 1MP0 AJ0 
1Kp0 LC0 1oo0 Wl0 EQN0 Db0 1fB0 Rb0 npB0 11z0 1C10 IL0 1s10 10n0 1o10 WL0 1zd0 On0 1ld0 11z0 1o10 14n0 1o10 14n0 1nd0 12n0 1nd0 Xz0 1q10 12n0 1hB0 1dX0 1ep0 1aL0 1eN0 17X0 1nf0 11z0 1tB0 19W0 1e10 17b0 1ep0 1gL0 18N0 1fz0 1eN0 17b0 1gq0 1gn0 19d0 1dz0 1c10 17X0 1hB0 1gn0 19d0 1dz0 1c10 17X0 1kp0 1dz0 1c10 1aL0 1eN0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0","Asia/Kabul|AFT AFT|-40 -4u|01|-10Qs0","Asia/Kamchatka|LMT PETT PETT PETST PETST|-ay.A -b0 -c0 -d0 -c0|01232323232323232323232412323232323232323232323232323232323232412|-1SLKy.A ivXy.A 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qN0 WM0","Asia/Karachi|LMT IST IST KART PKT PKST|-4s.c -5u -6u -50 -50 -60|012134545454|-2xoss.c 1qOKW.c 7zX0 eup0 LqMu 1fy01 1cL0 dK0X 11b0 1610 1jX0","Asia/Kashgar|LMT XJT|-5O.k -60|01|-1GgtO.k","Asia/Kathmandu|LMT IST NPT|-5F.g -5u -5J|012|-21JhF.g 2EGMb.g","Asia/Khandyga|LMT YAKT YAKT YAKST YAKST VLAT VLAST VLAT YAKT|-92.d -80 -90 -a0 -90 -a0 -b0 -b0 -a0|01232323232323232323232412323232323232323232323232565656565656565782|-21Q92.d pAp2.d 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 qK0 yN0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 17V0 7zD0","Asia/Krasnoyarsk|LMT KRAT KRAT KRAST KRAST KRAT|-6b.q -60 -70 -80 -70 -80|012323232323232323232324123232323232323232323232323232323232323252|-21Hib.q prAb.q 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Kuala_Lumpur|SMT MALT MALST MALT MALT JST MYT|-6T.p -70 -7k -7k -7u -90 -80|01234546|-2Bg6T.p 17anT.p 7hXE dM00 17bO 8Fyu 1so1u","Asia/Kuching|LMT BORT BORT BORTST JST MYT|-7l.k -7u -80 -8k -90 -80|01232323232323232425|-1KITl.k gDbP.k 6ynu AnE 1O0k AnE 1NAk AnE 1NAk AnE 1NAk AnE 1O0k AnE 1NAk AnE pAk 8Fz0 1so10","Asia/Macao|LMT MOT MOST CST|-7y.k -80 -90 -80|0121212121212121212121212121212121212121213|-2le7y.k 1XO34.k 1wn0 Rd0 1wn0 R9u 1wqu U10 1tz0 TVu 1tz0 17gu 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cJu 1cL0 1cN0 1fz0 1cN0 1cOu 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cJu 1cL0 1cN0 1fz0 1cN0 1cL0 KEp0","Asia/Magadan|LMT MAGT MAGT MAGST MAGST MAGT|-a3.c -a0 -b0 -c0 -b0 -c0|012323232323232323232324123232323232323232323232323232323232323251|-1Pca3.c eUo3.c 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Makassar|LMT MMT WITA JST|-7V.A -7V.A -80 -90|01232|-21JjV.A vfc0 myLV.A 8ML0","Asia/Manila|PHT PHST JST|-80 -90 -90|010201010|-1kJI0 AL0 cK10 65X0 mXB0 vX0 VK10 1db0","Asia/Nicosia|LMT EET 
EEST|-2d.s -20 -30|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1Vc2d.s 2a3cd.s 1cL0 1qp0 Xz0 19B0 19X0 1fB0 1db0 1cp0 1cL0 1fB0 19X0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1o30 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Asia/Novokuznetsk|LMT KRAT KRAT KRAST KRAST NOVST NOVT NOVT|-5M.M -60 -70 -80 -70 -70 -60 -70|012323232323232323232324123232323232323232323232323232323232325672|-1PctM.M eULM.M 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qN0 WM0 8Hz0","Asia/Novosibirsk|LMT NOVT NOVT NOVST NOVST|-5v.E -60 -70 -80 -70|0123232323232323232323241232341414141414141414141414141414141414121|-21Qnv.E pAFv.E 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 ml0 Os0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Omsk|LMT OMST OMST OMSST OMSST OMST|-4R.u -50 -60 -70 -60 -70|012323232323232323232324123232323232323232323232323232323232323252|-224sR.u pMLR.u 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Oral|LMT URAT URAT URAST URAT URAST ORAT ORAST ORAT|-3p.o -40 -50 -60 -60 -50 -40 -50 -50|012343232323232323251516767676767676767676767676768|-1Pc3p.o eUnp.o 23CL0 1db0 1cM0 1dc0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 2UK0 Fz0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 RW0","Asia/Pontianak|LMT PMT WIB JST WIB WITA WIB|-7h.k -7h.k -7u -90 -80 -80 -70|012324256|-2ua7h.k XE00 munL.k 8Rau 6kpu 4PXu xhcu Wqnu","Asia/Pyongyang|LMT KST JCST JST KST|-8n -8u -90 -90 -90|01234|-2um8n 97XR 12FXu jdA0","Asia/Qyzylorda|LMT KIZT KIZT KIZST KIZT QYZT QYZT QYZST|-4l.Q -40 -50 -60 -60 -50 -60 -70|012343232323232323232325676767676767676767676767676|-1Pc4l.Q eUol.Q 23CL0 1db0 1cM0 1dc0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2UK0 dC0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0","Asia/Rangoon|RMT BURT JST MMT|-6o.E -6u -90 -6u|0123|-21Jio.E SmnS.E 7j9u","Asia/Sakhalin|LMT JCST JST SAKT SAKST SAKST SAKT|-9u.M -90 -90 -b0 -c0 -b0 -a0|0123434343434343434343435634343434343565656565656565656565656565636|-2AGVu.M 1iaMu.M je00 1qFa0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 
1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o10 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Samarkand|LMT SAMT SAMT SAMST TAST UZST UZT|-4r.R -40 -50 -60 -60 -60 -50|01234323232323232323232356|-1Pc4r.R eUor.R 23CL0 1db0 1cM0 1dc0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 11x0 bf0","Asia/Seoul|LMT KST JCST JST KST KDT KDT|-8r.Q -8u -90 -90 -90 -9u -a0|01234151515151515146464|-2um8r.Q 97XV.Q 12FXu jjA0 kKo0 2I0u OL0 1FB0 Rb0 1qN0 TX0 1tB0 TX0 1tB0 TX0 1tB0 TX0 2ap0 12FBu 11A0 1o00 11A0","Asia/Singapore|SMT MALT MALST MALT MALT JST SGT SGT|-6T.p -70 -7k -7k -7u -90 -7u -80|012345467|-2Bg6T.p 17anT.p 7hXE dM00 17bO 8Fyu Mspu DTA0","Asia/Srednekolymsk|LMT MAGT MAGT MAGST MAGST MAGT SRET|-ae.Q -a0 -b0 -c0 -b0 -c0 -b0|012323232323232323232324123232323232323232323232323232323232323256|-1Pcae.Q eUoe.Q 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Taipei|JWST JST CST CDT|-80 -90 -80 -90|01232323232323232323232323232323232323232|-1iw80 joM0 1yo0 Tz0 1ip0 1jX0 1cN0 11b0 1oN0 11b0 1oN0 11b0 1oN0 11b0 10N0 1BX0 10p0 1pz0 10p0 1pz0 10p0 1db0 1dd0 1db0 1cN0 1db0 1cN0 1db0 1cN0 1db0 1BB0 ML0 1Bd0 ML0 uq10 1db0 1cN0 1db0 97B0 AL0","Asia/Tashkent|LMT TAST TAST TASST TASST UZST UZT|-4B.b -50 -60 -70 -60 -60 -50|01232323232323232323232456|-1Pc4B.b eUnB.b 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 11y0 bf0","Asia/Tbilisi|TBMT TBIT TBIT TBIST TBIST GEST GET GET GEST|-2X.b -30 -40 -50 -40 -40 -30 -40 -50|0123232323232323232323245656565787878787878787878567|-1Pc2X.b 1jUnX.b WCL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 3y0 19f0 1cK0 1cL0 1cN0 1cL0 1cN0 1cL0 1cM0 1cL0 1fB0 3Nz0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 An0 Os0 WM0","Asia/Tehran|LMT TMT IRST IRST IRDT IRDT|-3p.I -3p.I -3u -40 -50 -4u|01234325252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252|-2btDp.I 1d3c0 1huLT.I TXu 1pz0 sN0 vAu 1cL0 1dB0 1en0 pNB0 UL0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 64p0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0","Asia/Thimbu|LMT IST BTT|-5W.A -5u -60|012|-Su5W.A 1BGMs.A","Asia/Tokyo|JCST JST JDT|-90 -90 -a0|0121212121|-1iw90 pKq0 QL0 1lB0 13X0 1zB0 NX0 1zB0 NX0","Asia/Ulaanbaatar|LMT ULAT ULAT ULAST|-77.w -70 -80 -90|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-2APH7.w 2Uko7.w cKn0 1db0 1dd0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 6hD0 11z0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 kEp0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 
1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1fx0 1cP0 1cJ0 1cP0 1cJ0 1cP0 1cJ0","Asia/Ust-Nera|LMT YAKT YAKT MAGST MAGT MAGST MAGT MAGT VLAT VLAT|-9w.S -80 -90 -c0 -b0 -b0 -a0 -c0 -b0 -a0|0123434343434343434343456434343434343434343434343434343434343434789|-21Q9w.S pApw.S 23CL0 1d90 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 17V0 7zD0","Asia/Vladivostok|LMT VLAT VLAT VLAST VLAST VLAT|-8L.v -90 -a0 -b0 -a0 -b0|012323232323232323232324123232323232323232323232323232323232323252|-1SJIL.v itXL.v 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Yakutsk|LMT YAKT YAKT YAKST YAKST YAKT|-8C.W -80 -90 -a0 -90 -a0|012323232323232323232324123232323232323232323232323232323232323252|-21Q8C.W pAoC.W 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Yekaterinburg|LMT PMT SVET SVET SVEST SVEST YEKT YEKST YEKT|-42.x -3J.5 -40 -50 -60 -50 -50 -60 -60|0123434343434343434343435267676767676767676767676767676767676767686|-2ag42.x 7mQh.s qBvJ.5 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Asia/Yerevan|LMT YERT YERT YERST YERST AMST AMT AMT AMST|-2W -30 -40 -50 -40 -40 -30 -40 -50|0123232323232323232323245656565657878787878787878787878787878787|-1Pc2W 1jUnW WCL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1am0 2r0 1cJ0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 3Fb0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0","Atlantic/Azores|HMT AZOT AZOST AZOMT AZOT AZOST WET|1S.w 20 10 0 10 0 0|01212121212121212121212121212121212121212121232123212321232121212121212121212121212121212121212121454545454545454545454545454545456545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-2ldW5.s aPX5.s Sp0 LX0 1vc0 Tc0 1uM0 SM0 1vc0 Tc0 1vc0 SM0 1vc0 6600 1co0 3E00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 3I00 17c0 1cM0 1cM0 3Fc0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 1tA0 1cM0 1dc0 1400 gL0 IM0 s10 U00 dX0 Rc0 pd0 Rc0 gL0 Oo0 pd0 Rc0 gL0 Oo0 pd0 14o0 1cM0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 3Co0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 qIl0 1cM0 1fA0 1cM0 1cM0 1cN0 1cL0 1cN0 1cM0 1cM0 1cM0 1cM0 1cN0 1cL0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cL0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Atlantic/Bermuda|LMT AST ADT|4j.i 40 30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1BnRE.G 1LTbE.G 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","Atlantic/Canary|LMT CANT WET WEST|11.A 10 0 -10|01232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1UtaW.o XPAW.o 1lAK0 1a10 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Atlantic/Cape_Verde|LMT CVT CVST CVT|1y.4 20 10 10|01213|-2xomp.U 1qOMp.U 7zX0 1djf0","Atlantic/Faeroe|LMT WET WEST|r.4 0 -10|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2uSnw.U 2Wgow.U 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Atlantic/Madeira|FMT MADT MADST MADMT WET WEST|17.A 10 0 -10 0 -10|01212121212121212121212121212121212121212121232123212321232121212121212121212121212121212121212121454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-2ldWQ.o aPWQ.o Sp0 LX0 1vc0 Tc0 1uM0 SM0 1vc0 Tc0 1vc0 SM0 1vc0 6600 1co0 3E00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 3I00 17c0 1cM0 1cM0 3Fc0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 1tA0 1cM0 1dc0 1400 gL0 IM0 s10 U00 dX0 Rc0 pd0 Rc0 gL0 Oo0 pd0 Rc0 gL0 Oo0 pd0 14o0 1cM0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 3Co0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 qIl0 1cM0 1fA0 1cM0 1cM0 1cN0 1cL0 1cN0 1cM0 1cM0 1cM0 1cM0 1cN0 1cL0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Atlantic/Reykjavik|LMT IST ISST GMT|1s 10 0 0|012121212121212121212121212121212121212121212121212121212121212121213|-2uWmw mfaw 1Bd0 ML0 1LB0 Cn0 1LB0 3fX0 C10 HrX0 1cO0 LB0 1EL0 LA0 1C00 Oo0 1wo0 Rc0 1wo0 Rc0 1wo0 Rc0 1zc0 Oo0 1zc0 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1lc0 14o0 1o00 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1o00 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1o00 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1o00 14o0","Atlantic/South_Georgia|GST|20|0|","Atlantic/Stanley|SMT FKT FKST FKT FKST|3P.o 40 30 30 20|0121212121212134343212121212121212121212121212121212121212121212121212|-2kJw8.A 12bA8.A 19X0 1fB0 19X0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 Cn0 1Cc10 WL0 1qL0 U10 1tz0 U10 1qM0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1tz0 U10 1tz0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1tz0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qN0 U10 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 U10 1tz0 U10 1tz0 U10 1wn0 U10 1tz0 U10 1tz0 U10","Australia/ACT|AEST AEDT|-a0 -b0|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101|-293lX xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 14o0 1o00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1tA0 WM0 1tA0 U00 1tA0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 11A0 1o00 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0","Australia/Adelaide|ACST ACDT|-9u -au|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101|-293lt xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 Oo0 1zc0 WM0 1qM0 Rc0 1zc0 U00 1tA0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0","Australia/Brisbane|AEST AEDT|-a0 -b0|01010101010101010|-293lX xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 H1A0 Oo0 1zc0 Oo0 1zc0 Oo0","Australia/Broken_Hill|ACST ACDT|-9u -au|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101|-293lt xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 
1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 14o0 1o00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1tA0 WM0 1tA0 U00 1tA0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0","Australia/Currie|AEST AEDT|-a0 -b0|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101|-29E80 19X0 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1qM0 WM0 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1wo0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 11A0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 11A0 1o00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0","Australia/Darwin|ACST ACDT|-9u -au|010101010|-293lt xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0","Australia/Eucla|ACWST ACWDT|-8J -9J|0101010101010101010|-293kI xcX 10jd0 yL0 1cN0 1cL0 1gSp0 Oo0 l5A0 Oo0 iJA0 G00 zU00 IM0 1qM0 11A0 1o00 11A0","Australia/Hobart|AEST AEDT|-a0 -b0|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101|-29E80 19X0 10jd0 yL0 1cN0 1cL0 1fB0 19X0 VfB0 1cM0 1o00 Rc0 1wo0 Rc0 1wo0 U00 1wo0 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1qM0 WM0 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1wo0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 11A0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 11A0 1o00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0","Australia/LHI|AEST LHST LHDT LHDT|-a0 -au -bu -b0|0121212121313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313|raC0 1zdu Rb0 1zd0 On0 1zd0 On0 1zd0 On0 1zd0 TXu 1qMu WLu 1tAu WLu 1tAu TXu 1tAu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1zcu Onu 1zcu Onu 1zcu 11zu 1o0u 11zu 1o0u 11zu 1o0u 11zu 1qMu WLu 11Au 1nXu 1qMu 11zu 1o0u 11zu 1o0u 11zu 1qMu WLu 1qMu 11zu 1o0u WLu 1qMu 14nu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu","Australia/Lindeman|AEST AEDT|-a0 -b0|010101010101010101010|-293lX xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 H1A0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0","Australia/Melbourne|AEST AEDT|-a0 
-b0|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101|-293lX xcX 10jd0 yL0 1cN0 1cL0 1fB0 19X0 17c10 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1qM0 11A0 1tA0 U00 1tA0 U00 1tA0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 11A0 1o00 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0","Australia/Perth|AWST AWDT|-80 -90|0101010101010101010|-293jX xcX 10jd0 yL0 1cN0 1cL0 1gSp0 Oo0 l5A0 Oo0 iJA0 G00 zU00 IM0 1qM0 11A0 1o00 11A0","CET|CET CEST|-10 -20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 16M0 1gMM0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","CST6CDT|CST CDT CWT CPT|60 50 50 50|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","Chile/EasterIsland|EMT EAST EASST EAST EASST EAST|7h.s 70 60 60 50 50|012121212121212121212121212123434343434343434343434343434343434343434343434343434343434343434345|-1uSgG.w 1s4IG.w WL0 1zd0 On0 1ip0 11z0 1o10 11z0 1qN0 WL0 1ld0 14n0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 1wn0","EET|EET EEST|-20 -30|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|hDB0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 
1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","EST|EST|50|0|","EST5EDT|EST EDT EWT EPT|50 40 40 40|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261t0 1nX0 11B0 1nX0 SgN0 8x40 iv0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","Eire|DMT IST GMT BST IST|p.l -y.D 0 -10 -10|01232323232324242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242|-2ax9y.D Rc0 1fzy.D 14M0 1fc0 1g00 1co0 1dc0 1co0 1oo0 1400 1dc0 19A0 1io0 1io0 WM0 1o00 14o0 1o00 17c0 1io0 17c0 1fA0 1a00 1lc0 17c0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1cM0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1io0 1qM0 Dc0 g5X0 14p0 1wn0 17d0 1io0 11A0 1o00 17c0 1fA0 1a00 1fA0 1cM0 1fA0 1a00 17c0 1fA0 1a00 1io0 17c0 1lc0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1a00 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1tA0 IM0 90o0 U00 1tA0 U00 1tA0 U00 1tA0 U00 1tA0 WM0 1qM0 WM0 1qM0 WM0 1tA0 U00 1tA0 U00 1tA0 11z0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 14o0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Etc/GMT+0|GMT|0|0|","Etc/GMT+1|GMT+1|10|0|","Etc/GMT+10|GMT+10|a0|0|","Etc/GMT+11|GMT+11|b0|0|","Etc/GMT+12|GMT+12|c0|0|","Etc/GMT+2|GMT+2|20|0|","Etc/GMT+3|GMT+3|30|0|","Etc/GMT+4|GMT+4|40|0|","Etc/GMT+5|GMT+5|50|0|","Etc/GMT+6|GMT+6|60|0|","Etc/GMT+7|GMT+7|70|0|","Etc/GMT+8|GMT+8|80|0|","Etc/GMT+9|GMT+9|90|0|","Etc/GMT-1|GMT-1|-10|0|","Etc/GMT-10|GMT-10|-a0|0|","Etc/GMT-11|GMT-11|-b0|0|","Etc/GMT-12|GMT-12|-c0|0|","Etc/GMT-13|GMT-13|-d0|0|","Etc/GMT-14|GMT-14|-e0|0|","Etc/GMT-2|GMT-2|-20|0|","Etc/GMT-3|GMT-3|-30|0|","Etc/GMT-4|GMT-4|-40|0|","Etc/GMT-5|GMT-5|-50|0|","Etc/GMT-6|GMT-6|-60|0|","Etc/GMT-7|GMT-7|-70|0|","Etc/GMT-8|GMT-8|-80|0|","Etc/GMT-9|GMT-9|-90|0|","Etc/UCT|UCT|0|0|","Etc/UTC|UTC|0|0|","Europe/Amsterdam|AMT 
NST NEST NET CEST CET|-j.w -1j.w -1k -k -20 -10|010101010101010101010101010101010101010101012323234545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545|-2aFcj.w 11b0 1iP0 11A0 1io0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1co0 1io0 1yo0 Pc0 1a00 1fA0 1Bc0 Mo0 1tc0 Uo0 1tA0 U00 1uo0 W00 1s00 VA0 1so0 Vc0 1sM0 UM0 1wo0 Rc0 1u00 Wo0 1rA0 W00 1s00 VA0 1sM0 UM0 1w00 fV0 BCX.w 1tA0 U00 1u00 Wo0 1sm0 601k WM0 1fA0 1cM0 1cM0 1cM0 16M0 1gMM0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Andorra|WET CET CEST|0 -10 -20|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-UBA0 1xIN0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Athens|AMT EET EEST CEST CET|-1y.Q -20 -30 -20 -10|012123434121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2a61x.Q CNbx.Q mn0 kU10 9b0 3Es0 Xa0 1fb0 1dd0 k3X0 Nz0 SCp0 1vc0 SO0 1cM0 1a00 1ao0 1fc0 1a10 1fG0 1cg0 1dX0 1bX0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Belfast|GMT BST BDST|0 -10 -20|0101010101010101010101010101010101010101010101010121212121210101210101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2axa0 Rc0 1fA0 14M0 1fc0 1g00 1co0 1dc0 1co0 1oo0 1400 1dc0 19A0 1io0 1io0 WM0 1o00 14o0 1o00 17c0 1io0 17c0 1fA0 1a00 1lc0 17c0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1cM0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1io0 1qM0 Dc0 2Rz0 Dc0 1zc0 Oo0 1zc0 Rc0 1wo0 17c0 1iM0 FA0 xB0 1fA0 1a00 14o0 bb0 LA0 xB0 Rc0 1wo0 11A0 1o00 17c0 1fA0 1a00 1fA0 1cM0 1fA0 1a00 17c0 1fA0 1a00 1io0 17c0 1lc0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1a00 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1tA0 IM0 90o0 U00 1tA0 U00 1tA0 U00 1tA0 U00 1tA0 WM0 1qM0 WM0 1qM0 WM0 1tA0 U00 1tA0 U00 1tA0 11z0 1o00 11A0 1o00 11A0 1o00 
11A0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 14o0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Belgrade|CET CEST|-10 -20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-19RC0 3IP0 WM0 1fA0 1cM0 1cM0 1rc0 Qo0 1vmo0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Berlin|CET CEST CEMT|-10 -20 -30|01010101010101210101210101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 kL0 Nc0 m10 WM0 1ao0 1cp0 dX0 jz0 Dd0 1io0 17c0 1fA0 1a00 1ehA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Bratislava|CET CEST|-10 -20|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 16M0 1lc0 1tA0 17A0 11c0 1io0 17c0 1io0 17c0 1fc0 1ao0 1bNc0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Brussels|WET CET CEST WEST|0 -10 -20 -10|0121212103030303030303030303030303030303030303030303212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2ehc0 3zX0 11c0 1iO0 11A0 1o00 11A0 my0 Ic0 1qM0 Rc0 1EM0 UM0 1u00 10o0 1io0 1io0 17c0 1a00 1fA0 1cM0 1cM0 1io0 17c0 1fA0 1a00 1io0 1a30 1io0 17c0 1fA0 1a00 1io0 17c0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 y00 5Wn0 WM0 1fA0 1cM0 16M0 1iM0 16M0 1C00 Uo0 1eeo0 1a00 1fA0 1cM0 1cM0 
1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Bucharest|BMT EET EEST|-1I.o -20 -30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1xApI.o 20LI.o RA0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1Axc0 On0 1fA0 1a10 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cK0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cL0 1cN0 1cL0 1fB0 1nX0 11E0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Budapest|CET CEST|-10 -20|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1ip0 17b0 1op0 1tb0 Q2m0 3Ne0 WM0 1fA0 1cM0 1cM0 1oJ0 1dc0 1030 1fA0 1cM0 1cM0 1cM0 1cM0 1fA0 1a00 1iM0 1fA0 8Ha0 Rb0 1wN0 Rb0 1BB0 Lz0 1C20 LB0 SNX0 1a10 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Busingen|CET CEST|-10 -20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-19Lc0 11A0 1o00 11A0 1xG10 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Chisinau|CMT BMT EET EEST CEST CET MSK MSD|-1T -1I.o -20 -30 -20 -10 -30 -40|0123232323232323232345454676767676767676767623232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-26jdT wGMa.A 20LI.o RA0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 27A0 2en0 39g0 WM0 1fA0 1cM0 V90 1t7z0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1ty0 2bD0 1cM0 1cK0 1cL0 1cN0 1cL0 1cN0 
1cL0 1cN0 1cL0 1fB0 1nX0 11E0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Copenhagen|CET CEST|-10 -20|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2azC0 Tz0 VuO0 60q0 WM0 1fA0 1cM0 1cM0 1cM0 S00 1HA0 Nc0 1C00 Dc0 1Nc0 Ao0 1h5A0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Gibraltar|GMT BST BDST CET CEST|0 -10 -20 -10 -20|010101010101010101010101010101010101010101010101012121212121010121010101010101010101034343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-2axa0 Rc0 1fA0 14M0 1fc0 1g00 1co0 1dc0 1co0 1oo0 1400 1dc0 19A0 1io0 1io0 WM0 1o00 14o0 1o00 17c0 1io0 17c0 1fA0 1a00 1lc0 17c0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1cM0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1io0 1qM0 Dc0 2Rz0 Dc0 1zc0 Oo0 1zc0 Rc0 1wo0 17c0 1iM0 FA0 xB0 1fA0 1a00 14o0 bb0 LA0 xB0 Rc0 1wo0 11A0 1o00 17c0 1fA0 1a00 1fA0 1cM0 1fA0 1a00 17c0 1fA0 1a00 1io0 17c0 1lc0 17c0 1fA0 10Jz0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Helsinki|HMT EET EEST|-1D.N -20 -30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1WuND.N OULD.N 1dA0 1xGq0 1cM0 1cM0 1cM0 1cN0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Kaliningrad|CET CEST CET CEST MSK MSD EEST EET FET|-10 -20 -20 -30 -30 -40 -30 -20 -30|0101010101010232454545454545454545454676767676767676767676767676767676767676787|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 Am0 Lb0 1en0 op0 1pNz0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 
1cJ0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Europe/Kiev|KMT EET MSK CEST CET MSD EEST|-22.4 -20 -30 -20 -10 -40 -30|0123434252525252525252525256161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161|-1Pc22.4 eUo2.4 rnz0 2Hg0 WM0 1fA0 da0 1v4m0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 Db0 3220 1cK0 1cL0 1cN0 1cL0 1cN0 1cL0 1cQ0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Lisbon|LMT WET WEST WEMT CET CEST|A.J 0 -10 -20 -10 -20|012121212121212121212121212121212121212121212321232123212321212121212121212121212121212121212121214121212121212121212121212121212124545454212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2ldXn.f aPWn.f Sp0 LX0 1vc0 Tc0 1uM0 SM0 1vc0 Tc0 1vc0 SM0 1vc0 6600 1co0 3E00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 3I00 17c0 1cM0 1cM0 3Fc0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 1tA0 1cM0 1dc0 1400 gL0 IM0 s10 U00 dX0 Rc0 pd0 Rc0 gL0 Oo0 pd0 Rc0 gL0 Oo0 pd0 14o0 1cM0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 3Co0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 pvy0 1cM0 1cM0 1fA0 1cM0 1cM0 1cN0 1cL0 1cN0 1cM0 1cM0 1cM0 1cM0 1cN0 1cL0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Luxembourg|LMT CET CEST WET WEST WEST WET|-o.A -10 -20 0 -10 -20 -10|0121212134343434343434343434343434343434343434343434565651212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2DG0o.A t6mo.A TB0 1nX0 Up0 1o20 11A0 rW0 CM0 1qP0 R90 1EO0 UK0 1u20 10m0 1ip0 1in0 17e0 19W0 1fB0 1db0 1cp0 1in0 17d0 1fz0 1a10 1in0 1a10 1in0 17f0 1fA0 1a00 1io0 17c0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 vA0 60L0 WM0 1fA0 1cM0 17c0 1io0 16M0 1C00 Uo0 1eeo0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Madrid|WET WEST WEMT CET CEST|0 -10 -20 -10 
-20|01010101010101010101010121212121234343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-28dd0 11A0 1go0 19A0 1co0 1dA0 b1A0 18o0 3I00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 iyo0 Rc0 18o0 1hc0 1io0 1a00 14o0 5aL0 MM0 1vc0 17A0 1i00 1bc0 1eo0 17d0 1in0 17A0 6hA0 10N0 XIL0 1a10 1in0 17d0 19X0 1cN0 1fz0 1a10 1fX0 1cp0 1cO0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Malta|CET CEST|-10 -20|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2as10 M00 1cM0 1cM0 14o0 1o00 WM0 1qM0 17c0 1cM0 M3A0 5M20 WM0 1fA0 1cM0 1cM0 1cM0 16m0 1de0 1lc0 14m0 1lc0 WO0 1qM0 GTW0 On0 1C10 Lz0 1C10 Lz0 1EN0 Lz0 1C10 Lz0 1zd0 Oo0 1C00 On0 1cp0 1cM0 1lA0 Xc0 1qq0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1iN0 19z0 1fB0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Minsk|MMT EET MSK CEST CET MSD EEST FET|-1O -20 -30 -20 -10 -40 -30 -30|012343432525252525252525252616161616161616161616161616161616161616172|-1Pc1O eUnO qNX0 3gQ0 WM0 1fA0 1cM0 Al0 1tsn0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 3Fc0 1cN0 1cK0 1cM0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hy0","Europe/Monaco|PMT WET WEST WEMT CET CEST|-9.l 0 -10 -20 -10 -20|01212121212121212121212121212121212121212121212121232323232345454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-2nco9.l cNb9.l HA0 19A0 1iM0 11c0 1oo0 Wo0 1rc0 QM0 1EM0 UM0 1u00 10o0 1io0 1wo0 Rc0 1a00 1fA0 1cM0 1cM0 1io0 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 1fA0 1a00 1io0 17c0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Df0 2RV0 11z0 11B0 1ze0 WM0 1fA0 1cM0 1fa0 1aq0 16M0 1ekn0 1cL0 1fC0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 
1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Moscow|MMT MMT MST MDST MSD MSK MSM EET EEST MSK|-2u.h -2v.j -3v.j -4v.j -40 -30 -50 -20 -30 -40|012132345464575454545454545454545458754545454545454545454545454545454545454595|-2ag2u.h 2pyW.W 1bA0 11X0 GN0 1Hb0 c20 imv.j 3DA0 dz0 15A0 c10 2q10 iM10 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rU0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Europe/Paris|PMT WET WEST CEST CET WEMT|-9.l 0 -10 -20 -10 -20|0121212121212121212121212121212121212121212121212123434352543434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-2nco8.l cNb8.l HA0 19A0 1iM0 11c0 1oo0 Wo0 1rc0 QM0 1EM0 UM0 1u00 10o0 1io0 1wo0 Rc0 1a00 1fA0 1cM0 1cM0 1io0 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 1fA0 1a00 1io0 17c0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Df0 Ik0 5M30 WM0 1fA0 1cM0 Vx0 hB0 1aq0 16M0 1ekn0 1cL0 1fC0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Riga|RMT LST EET MSK CEST CET MSD EEST|-1A.y -2A.y -20 -30 -20 -10 -40 -30|010102345454536363636363636363727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272|-25TzA.y 11A0 1iM0 ko0 gWm0 yDXA.y 2bX0 3fE0 WM0 1fA0 1cM0 1cM0 4m0 1sLy0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 1o00 11A0 1o00 11A0 1qM0 3oo0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Rome|CET CEST|-10 -20|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2as10 M00 1cM0 1cM0 14o0 1o00 WM0 1qM0 17c0 1cM0 M3A0 5M20 WM0 1fA0 1cM0 16K0 1iO0 16m0 1de0 1lc0 14m0 1lc0 WO0 1qM0 GTW0 On0 1C10 Lz0 1C10 Lz0 1EN0 Lz0 1C10 Lz0 1zd0 Oo0 1C00 On0 1C10 Lz0 1zd0 On0 1C10 LA0 1C00 LA0 1zc0 Oo0 1C00 Oo0 1zc0 Oo0 1fC0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 
11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Samara|LMT SAMT SAMT KUYT KUYST MSD MSK EEST KUYT SAMST SAMST|-3k.k -30 -40 -40 -50 -40 -30 -30 -30 -50 -40|012343434343434343435656782929292929292929292929292929292929292a12|-22WNk.k qHak.k bcn0 1Qqo0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cN0 8o0 14j0 1cL0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qN0 WM0","Europe/Simferopol|SMT EET MSK CEST CET MSD EEST MSK|-2g -20 -30 -20 -10 -40 -30 -40|012343432525252525252525252161616525252616161616161616161616161616161616172|-1Pc2g eUog rEn0 2qs0 WM0 1fA0 1cM0 3V0 1u0L0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1Q00 4eL0 1cL0 1cN0 1cL0 1cN0 dX0 WL0 1cN0 1cL0 1fB0 1o30 11B0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11z0 1nW0","Europe/Sofia|EET CET CEST EEST|-20 -10 -20 -30|01212103030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030|-168L0 WM0 1fA0 1cM0 1cM0 1cN0 1mKH0 1dd0 1fb0 1ap0 1fb0 1a20 1fy0 1a30 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cK0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1nX0 11E0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Stockholm|CET CEST|-10 -20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2azC0 TB0 2yDe0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Tallinn|TMT CET CEST EET MSK MSD EEST|-1D -10 -20 -20 -30 -40 -30|012103421212454545454545454546363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363|-26oND teD 11A0 1Ta0 4rXl KSLD 2FX0 2Jg0 WM0 1fA0 1cM0 18J0 1sTX0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o10 11A0 1qM0 5QM0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Tirane|LMT CET CEST|-1j.k -10 
-20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2glBj.k 14pcj.k 5LC0 WM0 4M0 1fCK0 10n0 1op0 11z0 1pd0 11z0 1qN0 WL0 1qp0 Xb0 1qp0 Xb0 1qp0 11z0 1lB0 11z0 1qN0 11z0 1iN0 16n0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Uzhgorod|CET CEST MSK MSD EET EEST|-10 -20 -30 -40 -20 -30|010101023232323232323232320454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-1cqL0 6i00 WM0 1fA0 1cM0 1ml0 1Cp0 1r3W0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1Q00 1Nf0 2pw0 1cL0 1cN0 1cL0 1cN0 1cL0 1cQ0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Vienna|CET CEST|-10 -20|0101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 3KM0 14o0 LA00 6i00 WM0 1fA0 1cM0 1cM0 1cM0 400 2qM0 1a00 1cM0 1cM0 1io0 17c0 1gHa0 19X0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Vilnius|WMT KMT CET EET MSK CEST MSD EEST|-1o -1z.A -10 -20 -30 -20 -40 -30|012324525254646464646464646464647373737373737352537373737373737373737373737373737373737373737373737373737373737373737373|-293do 6ILM.o 1Ooz.A zz0 Mfd0 29W0 3is0 WM0 1fA0 1cM0 LV0 1tgL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11B0 1o00 11A0 1qM0 8io0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Volgograd|LMT TSAT STAT STAT VOLT VOLST VOLST VOLT MSD MSK MSK|-2V.E -30 -30 -40 -40 -50 -40 -30 -40 -30 -40|0123454545454545454546767489898989898989898989898989898989898989a9|-21IqV.E cLXV.E cEM0 1gqn0 Lco0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 2pz0 1cJ0 1cQ0 1cM0 1cM0 1cM0 1cM0 
1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0","Europe/Warsaw|WMT CET CEST EET EEST|-1o -10 -20 -20 -30|012121234312121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2ctdo 1LXo 11d0 1iO0 11A0 1o00 11A0 1on0 11A0 6zy0 HWP0 5IM0 WM0 1fA0 1cM0 1dz0 1mL0 1en0 15B0 1aq0 1nA0 11A0 1io0 17c0 1fA0 1a00 iDX0 LA0 1cM0 1cM0 1C00 Oo0 1cM0 1cM0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1C00 LA0 uso0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","Europe/Zaporozhye|CUT EET MSK CEST CET MSD EEST|-2k -20 -30 -20 -10 -40 -30|01234342525252525252525252526161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161|-1Pc2k eUok rdb0 2RE0 WM0 1fA0 8m0 1v9a0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cK0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cQ0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","HST|HST|a0|0|","Indian/Chagos|LMT IOT IOT|-4N.E -50 -60|012|-2xosN.E 3AGLN.E","Indian/Christmas|CXT|-70|0|","Indian/Cocos|CCT|-6u|0|","Indian/Kerguelen|zzz TFT|0 -50|01|-MG00","Indian/Mahe|LMT SCT|-3F.M -40|01|-2yO3F.M","Indian/Maldives|MMT MVT|-4S -50|01|-olgS","Indian/Mauritius|LMT MUT MUST|-3O -40 -50|012121|-2xorO 34unO 14L0 12kr0 11z0","Indian/Reunion|LMT RET|-3F.Q -40|01|-2mDDF.Q","Kwajalein|MHT KWAT MHT|-b0 c0 -c0|012|-AX0 W9X0","MET|MET MEST|-10 -20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 16M0 1gMM0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00","MST|MST|70|0|","MST7MDT|MST MDT MWT MPT|70 60 60 60|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261r0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 
s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","NZ-CHAT|CHAST CHAST CHADT|-cf -cJ -dJ|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-WqAf 1adef IM0 1C00 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1qM0 14o0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1io0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00","PST8PDT|PST PDT PWT PPT|80 70 70 70|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261q0 1nX0 11B0 1nX0 SgN0 8x10 iy0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0","Pacific/Apia|LMT WSST SST SDT WSDT WSST|bq.U bu b0 a0 -e0 -d0|01232345454545454545454545454545454545454545454545454545454|-2nDMx.4 1yW03.4 2rRbu 1ff0 1a00 CI0 AQ0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00","Pacific/Bougainville|PGT JST BST|-a0 -90 -b0|0102|-16Wy0 7CN0 2MQp0","Pacific/Chuuk|CHUT|-a0|0|","Pacific/Efate|LMT VUT VUST|-bd.g -b0 -c0|0121212121212121212121|-2l9nd.g 2Szcd.g 1cL0 1oN0 10L0 1fB0 19X0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 Lz0 1Nd0 An0","Pacific/Enderbury|PHOT PHOT PHOT|c0 b0 -d0|012|nIc0 B8n0","Pacific/Fakaofo|TKT TKT|b0 -d0|01|1Gfn0","Pacific/Fiji|LMT FJT FJST|-bT.I -c0 -d0|012121212121212121212121212121212121212121212121212121212121212|-2bUzT.I 3m8NT.I LA0 1EM0 IM0 nJc0 LA0 1o00 Rc0 1wo0 Ao0 1Nc0 Ao0 1Q00 xz0 1SN0 uM0 1SM0 xA0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 xA0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 xA0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1VA0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0","Pacific/Funafuti|TVT|-c0|0|","Pacific/Galapagos|LMT ECT 
GALT|5W.o 50 60|012|-1yVS1.A 2dTz1.A","Pacific/Gambier|LMT GAMT|8X.M 90|01|-2jof0.c","Pacific/Guadalcanal|LMT SBT|-aD.M -b0|01|-2joyD.M","Pacific/Guam|GST ChST|-a0 -a0|01|1fpq0","Pacific/Honolulu|HST HDT HST|au 9u a0|010102|-1thLu 8x0 lef0 8Pz0 46p0","Pacific/Kiritimati|LINT LINT LINT|aE a0 -e0|012|nIaE B8nk","Pacific/Kosrae|KOST KOST|-b0 -c0|010|-AX0 1bdz0","Pacific/Majuro|MHT MHT|-b0 -c0|01|-AX0","Pacific/Marquesas|LMT MART|9i 9u|01|-2joeG","Pacific/Midway|LMT NST BST SST|bm.M b0 b0 b0|0123|-2nDMB.c 2gVzB.c EyM0","Pacific/Nauru|LMT NRT JST NRT|-b7.E -bu -90 -c0|01213|-1Xdn7.E PvzB.E 5RCu 1ouJu","Pacific/Niue|NUT NUT NUT|bk bu b0|012|-KfME 17y0a","Pacific/Norfolk|NMT NFT|-bc -bu|01|-Kgbc","Pacific/Noumea|LMT NCT NCST|-b5.M -b0 -c0|01212121|-2l9n5.M 2EqM5.M xX0 1PB0 yn0 HeP0 Ao0","Pacific/Palau|PWT|-90|0|","Pacific/Pitcairn|PNT PST|8u 80|01|18Vku","Pacific/Pohnpei|PONT|-b0|0|","Pacific/Port_Moresby|PGT|-a0|0|","Pacific/Rarotonga|CKT CKHST CKT|au 9u a0|012121212121212121212121212|lyWu IL0 1zcu Onu 1zcu Onu 1zcu Rbu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1zcu Onu 1zcu Onu 1zcu Onu","Pacific/Tahiti|LMT TAHT|9W.g a0|01|-2joe1.I","Pacific/Tarawa|GILT|-c0|0|","Pacific/Tongatapu|TOT TOT TOST|-ck -d0 -e0|01212121|-1aB0k 2n5dk 15A0 1wo0 xz0 1Q10 xz0","Pacific/Wake|WAKT|-c0|0|","Pacific/Wallis|WFT|-c0|0|","WET|WET WEST|0 -10|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|hDB0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00"], 
-links:["Africa/Abidjan|Africa/Bamako","Africa/Abidjan|Africa/Banjul","Africa/Abidjan|Africa/Conakry","Africa/Abidjan|Africa/Dakar","Africa/Abidjan|Africa/Freetown","Africa/Abidjan|Africa/Lome","Africa/Abidjan|Africa/Nouakchott","Africa/Abidjan|Africa/Ouagadougou","Africa/Abidjan|Africa/Sao_Tome","Africa/Abidjan|Africa/Timbuktu","Africa/Abidjan|Atlantic/St_Helena","Africa/Addis_Ababa|Africa/Asmara","Africa/Addis_Ababa|Africa/Asmera","Africa/Addis_Ababa|Africa/Dar_es_Salaam","Africa/Addis_Ababa|Africa/Djibouti","Africa/Addis_Ababa|Africa/Kampala","Africa/Addis_Ababa|Africa/Mogadishu","Africa/Addis_Ababa|Africa/Nairobi","Africa/Addis_Ababa|Indian/Antananarivo","Africa/Addis_Ababa|Indian/Comoro","Africa/Addis_Ababa|Indian/Mayotte","Africa/Bangui|Africa/Brazzaville","Africa/Bangui|Africa/Douala","Africa/Bangui|Africa/Kinshasa","Africa/Bangui|Africa/Lagos","Africa/Bangui|Africa/Libreville","Africa/Bangui|Africa/Luanda","Africa/Bangui|Africa/Malabo","Africa/Bangui|Africa/Niamey","Africa/Bangui|Africa/Porto-Novo","Africa/Blantyre|Africa/Bujumbura","Africa/Blantyre|Africa/Gaborone","Africa/Blantyre|Africa/Harare","Africa/Blantyre|Africa/Kigali","Africa/Blantyre|Africa/Lubumbashi","Africa/Blantyre|Africa/Lusaka","Africa/Blantyre|Africa/Maputo","Africa/Cairo|Egypt","Africa/Johannesburg|Africa/Maseru","Africa/Johannesburg|Africa/Mbabane","Africa/Juba|Africa/Khartoum","Africa/Tripoli|Libya","America/Adak|America/Atka","America/Adak|US/Aleutian","America/Anchorage|US/Alaska","America/Anguilla|America/Antigua","America/Anguilla|America/Dominica","America/Anguilla|America/Grenada","America/Anguilla|America/Guadeloupe","America/Anguilla|America/Marigot","America/Anguilla|America/Montserrat","America/Anguilla|America/Port_of_Spain","America/Anguilla|America/St_Barthelemy","America/Anguilla|America/St_Kitts","America/Anguilla|America/St_Lucia","America/Anguilla|America/St_Thomas","America/Anguilla|America/St_Vincent","America/Anguilla|America/Tortola","America/Anguilla|America/Virgin","America/Argentina/Buenos_Aires|America/Buenos_Aires","America/Argentina/Catamarca|America/Argentina/ComodRivadavia","America/Argentina/Catamarca|America/Catamarca","America/Argentina/Cordoba|America/Cordoba","America/Argentina/Cordoba|America/Rosario","America/Argentina/Jujuy|America/Jujuy","America/Argentina/Mendoza|America/Mendoza","America/Aruba|America/Curacao","America/Aruba|America/Kralendijk","America/Aruba|America/Lower_Princes","America/Atikokan|America/Coral_Harbour","America/Cayman|America/Panama","America/Chicago|US/Central","America/Denver|America/Shiprock","America/Denver|Navajo","America/Denver|US/Mountain","America/Detroit|US/Michigan","America/Edmonton|Canada/Mountain","America/Ensenada|America/Tijuana","America/Ensenada|Mexico/BajaNorte","America/Fort_Wayne|America/Indiana/Indianapolis","America/Fort_Wayne|America/Indianapolis","America/Fort_Wayne|US/East-Indiana","America/Halifax|Canada/Atlantic","America/Havana|Cuba","America/Indiana/Knox|America/Knox_IN","America/Indiana/Knox|US/Indiana-Starke","America/Jamaica|Jamaica","America/Kentucky/Louisville|America/Louisville","America/Los_Angeles|US/Pacific","America/Los_Angeles|US/Pacific-New","America/Manaus|Brazil/West","America/Mazatlan|Mexico/BajaSur","America/Mexico_City|Mexico/General","America/Montreal|America/Toronto","America/Montreal|Canada/Eastern","America/New_York|US/Eastern","America/Noronha|Brazil/DeNoronha","America/Phoenix|US/Arizona","America/Porto_Acre|America/Rio_Branco","America/Porto_Acre|Brazil/Acre","America/Regina|Canada/East-Saskatchewan
","America/Regina|Canada/Saskatchewan","America/Santiago|Chile/Continental","America/Sao_Paulo|Brazil/East","America/St_Johns|Canada/Newfoundland","America/Vancouver|Canada/Pacific","America/Whitehorse|Canada/Yukon","America/Winnipeg|Canada/Central","Antarctica/McMurdo|Antarctica/South_Pole","Antarctica/McMurdo|NZ","Antarctica/McMurdo|Pacific/Auckland","Arctic/Longyearbyen|Atlantic/Jan_Mayen","Arctic/Longyearbyen|Europe/Oslo","Asia/Aden|Asia/Kuwait","Asia/Aden|Asia/Riyadh","Asia/Ashgabat|Asia/Ashkhabad","Asia/Bahrain|Asia/Qatar","Asia/Bangkok|Asia/Phnom_Penh","Asia/Bangkok|Asia/Vientiane","Asia/Calcutta|Asia/Kolkata","Asia/Chongqing|Asia/Chungking","Asia/Chongqing|Asia/Harbin","Asia/Chongqing|Asia/Shanghai","Asia/Chongqing|PRC","Asia/Dacca|Asia/Dhaka","Asia/Dubai|Asia/Muscat","Asia/Ho_Chi_Minh|Asia/Saigon","Asia/Hong_Kong|Hongkong","Asia/Istanbul|Europe/Istanbul","Asia/Istanbul|Turkey","Asia/Jerusalem|Asia/Tel_Aviv","Asia/Jerusalem|Israel","Asia/Kashgar|Asia/Urumqi","Asia/Kathmandu|Asia/Katmandu","Asia/Macao|Asia/Macau","Asia/Makassar|Asia/Ujung_Pandang","Asia/Nicosia|Europe/Nicosia","Asia/Seoul|ROK","Asia/Singapore|Singapore","Asia/Taipei|ROC","Asia/Tehran|Iran","Asia/Thimbu|Asia/Thimphu","Asia/Tokyo|Japan","Asia/Ulaanbaatar|Asia/Ulan_Bator","Atlantic/Faeroe|Atlantic/Faroe","Atlantic/Reykjavik|Iceland","Australia/ACT|Australia/Canberra","Australia/ACT|Australia/NSW","Australia/ACT|Australia/Sydney","Australia/Adelaide|Australia/South","Australia/Brisbane|Australia/Queensland","Australia/Broken_Hill|Australia/Yancowinna","Australia/Darwin|Australia/North","Australia/Hobart|Australia/Tasmania","Australia/LHI|Australia/Lord_Howe","Australia/Melbourne|Australia/Victoria","Australia/Perth|Australia/West","Chile/EasterIsland|Pacific/Easter","Eire|Europe/Dublin","Etc/GMT+0|Etc/GMT","Etc/GMT+0|Etc/GMT-0","Etc/GMT+0|Etc/GMT0","Etc/GMT+0|Etc/Greenwich","Etc/GMT+0|GMT","Etc/GMT+0|GMT+0","Etc/GMT+0|GMT-0","Etc/GMT+0|GMT0","Etc/GMT+0|Greenwich","Etc/UCT|UCT","Etc/UTC|Etc/Universal","Etc/UTC|Etc/Zulu","Etc/UTC|UTC","Etc/UTC|Universal","Etc/UTC|Zulu","Europe/Belfast|Europe/Guernsey","Europe/Belfast|Europe/Isle_of_Man","Europe/Belfast|Europe/Jersey","Europe/Belfast|Europe/London","Europe/Belfast|GB","Europe/Belfast|GB-Eire","Europe/Belgrade|Europe/Ljubljana","Europe/Belgrade|Europe/Podgorica","Europe/Belgrade|Europe/Sarajevo","Europe/Belgrade|Europe/Skopje","Europe/Belgrade|Europe/Zagreb","Europe/Bratislava|Europe/Prague","Europe/Busingen|Europe/Vaduz","Europe/Busingen|Europe/Zurich","Europe/Chisinau|Europe/Tiraspol","Europe/Helsinki|Europe/Mariehamn","Europe/Lisbon|Portugal","Europe/Moscow|W-SU","Europe/Rome|Europe/San_Marino","Europe/Rome|Europe/Vatican","Europe/Warsaw|Poland","Kwajalein|Pacific/Kwajalein","NZ-CHAT|Pacific/Chatham","Pacific/Chuuk|Pacific/Truk","Pacific/Chuuk|Pacific/Yap","Pacific/Guam|Pacific/Saipan","Pacific/Honolulu|Pacific/Johnston","Pacific/Honolulu|US/Hawaii","Pacific/Midway|Pacific/Pago_Pago","Pacific/Midway|Pacific/Samoa","Pacific/Midway|US/Samoa","Pacific/Pohnpei|Pacific/Ponape"]}),a}); \ No newline at end of file +!function(M,z){"use strict";"object"==typeof module&&module.exports?module.exports=z(require("moment")):"function"==typeof define&&define.amd?define(["moment"],z):z(M.moment)}(this,function(O){"use strict";void 0===O.version&&O.default&&(O=O.default);var z,W={},A={},c={},d={},R={},M=(O&&"string"==typeof O.version||C("Moment Timezone requires Moment.js. 
See https://momentjs.com/timezone/docs/#/use-it/browser/"),O.version.split(".")),b=+M[0],p=+M[1];function q(M){return 96= 2.6.0. You are using Moment.js "+O.version+". See momentjs.com"),f.prototype={_set:function(M){this.name=M.name,this.abbrs=M.abbrs,this.untils=M.untils,this.offsets=M.offsets,this.population=M.population},_index:function(M){M=function(M,z){var b,p=z.length;if(M=z[p-2])return p-1;if(M>=z[p-1])return-1;for(var O=0,A=p-1;1= 2.9.0. You are using Moment.js "+O.version+"."),O.defaultZone=M?t(M):null,O};M=O.momentProperties;return"[object Array]"===Object.prototype.toString.call(M)?(M.push("_z"),M.push("_a")):M&&(M._z=null),s({version:"2023d",zones:["Africa/Abidjan|LMT GMT|g.8 0|01|-2ldXH.Q|48e5","Africa/Nairobi|LMT +0230 EAT +0245|-2r.g -2u -30 -2J|012132|-2ua2r.g N6nV.g 3Fbu h1cu dzbJ|47e5","Africa/Algiers|LMT PMT WET WEST CET CEST|-c.c -9.l 0 -10 -10 -20|01232323232323232454542423234542324|-3bQ0c.c MDA2.P cNb9.l HA0 19A0 1iM0 11c0 1oo0 Wo0 1rc0 QM0 1EM0 UM0 DA0 Imo0 rd0 De0 9Xz0 1fb0 1ap0 16K0 2yo0 mEp0 hwL0 jxA0 11A0 dDd0 17b0 11B0 1cN0 2Dy0 1cN0 1fB0 1cL0|26e5","Africa/Lagos|LMT GMT +0030 WAT|-d.z 0 -u -10|01023|-2B40d.z 7iod.z dnXK.p dLzH.z|17e6","Africa/Bissau|LMT -01 GMT|12.k 10 0|012|-2ldX0 2xoo0|39e4","Africa/Maputo|LMT CAT|-2a.k -20|01|-2GJea.k|26e5","Africa/Cairo|LMT EET EEST|-25.9 -20 -30|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2MBC5.9 1AQM5.9 vb0 1ip0 11z0 1iN0 1nz0 12p0 1pz0 10N0 1pz0 16p0 1jz0 s3d0 Vz0 1oN0 11b0 1oO0 10N0 1pz0 10N0 1pb0 10N0 1pb0 10N0 1pb0 10N0 1pz0 10N0 1pb0 10N0 1pb0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1WL0 rd0 1Rz0 wp0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1qL0 Xd0 1oL0 11d0 1oL0 11d0 1pb0 11d0 1oL0 11d0 1oL0 11d0 1ny0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 WL0 1qN0 Rb0 1wp0 On0 1zd0 Lz0 1EN0 Fb0 c10 8n0 8Nd0 gL0 e10 mn0 kSp0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 
1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 
1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1a10 1fz0|15e6","Africa/Casablanca|LMT +00 +01|u.k 0 -10|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-2gMnt.E 130Lt.E rb0 Dd0 dVb0 b6p0 TX0 EoB0 LL0 gnd0 rz0 43d0 AL0 1Nd0 XX0 1Cp0 pz0 dEp0 4mn0 SyN0 AL0 1Nd0 wn0 1FB0 Db0 1zd0 Lz0 1Nf0 wM0 co0 go0 1o00 s00 dA0 vc0 11A0 A00 e00 y00 11A0 uM0 e00 Dc0 11A0 s00 e00 IM0 WM0 mo0 gM0 LA0 WM0 jA0 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0|32e5","Africa/Ceuta|LMT WET WEST CET CEST|l.g 0 -10 -10 -20|0121212121212121212121343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-2M0M0 GdX0 11z0 drd0 18p0 3HX0 17d0 1fz0 1a10 1io0 1a00 1y7o0 LL0 gnd0 rz0 43d0 AL0 1Nd0 XX0 1Cp0 pz0 dEp0 4VB0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 
11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|85e3","Africa/El_Aaiun|LMT -01 +00 +01|Q.M 10 0 -10|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323|-1rDz7.c 1GVA7.c 6L0 AL0 1Nd0 XX0 1Cp0 pz0 1cBB0 AL0 1Nd0 wn0 1FB0 Db0 1zd0 Lz0 1Nf0 wM0 co0 go0 1o00 s00 dA0 vc0 11A0 A00 e00 y00 11A0 uM0 e00 Dc0 11A0 s00 e00 IM0 WM0 mo0 gM0 LA0 WM0 jA0 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0 2600 e00 28M0 e00 2600 gM0|20e4","Africa/Johannesburg|LMT SAST SAST SAST|-1Q -1u -20 -30|0123232|-39EpQ qTcm 1Ajdu 1cL0 1cN0 1cL0|84e5","Africa/Juba|LMT CAT CAST EAT|-26.s -20 -30 -30|012121212121212121212121212121212131|-1yW26.s 1zK06.s 16L0 1iN0 17b0 1jd0 17b0 1ip0 17z0 1i10 17X0 1hB0 18n0 1hd0 19b0 1gp0 19z0 1iN0 17b0 1ip0 17z0 1i10 18n0 1hd0 18L0 1gN0 19b0 1gp0 19z0 1iN0 17z0 1i10 17X0 yGd0 PeX0|","Africa/Khartoum|LMT CAT CAST EAT|-2a.8 -20 -30 -30|012121212121212121212121212121212131|-1yW2a.8 1zK0a.8 16L0 1iN0 17b0 1jd0 17b0 1ip0 17z0 1i10 17X0 1hB0 18n0 1hd0 19b0 1gp0 19z0 1iN0 17b0 1ip0 17z0 1i10 18n0 1hd0 18L0 1gN0 19b0 1gp0 19z0 1iN0 17z0 1i10 17X0 yGd0 HjL0|51e5","Africa/Monrovia|LMT MMT MMT GMT|H.8 H.8 I.u 0|0123|-3ygng.Q 1usM0 28G01.m|11e5","Africa/Ndjamena|LMT WAT WAST|-10.c -10 -20|0121|-2le10.c 2J3c0.c Wn0|13e5","Africa/Sao_Tome|LMT LMT GMT WAT|-q.U A.J 0 -10|01232|-3tooq.U 18aoq.U 4i6N0 2q00|","Africa/Tripoli|LMT CET CEST EET|-Q.I -10 -20 -20|012121213121212121212121213123123|-21JcQ.I 1hnBQ.I vx0 4iP0 xx0 4eN0 Bb0 7ip0 U0n0 A10 1db0 1cN0 1db0 1dd0 1db0 1eN0 1bb0 1e10 1cL0 1c10 1db0 1dd0 1db0 1cN0 1db0 1q10 fAn0 1ep0 1db0 AKq0 TA0 1o00|11e5","Africa/Tunis|LMT PMT CET CEST|-E.I -9.l -10 -20|01232323232323232323232323232323232|-3zO0E.I 1cBAv.n 18pa9.l 1qM0 DA0 3Tc0 11B0 1ze0 WM0 7z0 3d0 14L0 1cN0 1f90 1ar0 16J0 1gXB0 WM0 1rA0 11c0 nwo0 Ko0 1cM0 1cM0 1rA0 10M0 zuM0 10N0 1aN0 1qM0 WM0 1qM0 11A0 1o00|20e5","Africa/Windhoek|LMT +0130 SAST SAST CAT WAT|-18.o -1u -20 -30 -20 -10|012324545454545454545454545454545454545454545454545454|-39Ep8.o qTbC.o 1Ajdu 1cL0 1SqL0 9Io0 16P0 1nX0 
11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0|32e4","America/Adak|LMT LMT NST NWT NPT BST BDT AHST HST HDT|-cd.m bK.C b0 a0 a0 b0 a0 a0 a0 90|01234256565656565656565656565656565678989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898|-48Pzs.L 1jVzf.p 1EX1d.m 8wW0 iB0 Qlb0 52O0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cm0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|326","America/Anchorage|LMT LMT AST AWT APT AHST AHDT YST AKST AKDT|-e0.o 9X.A a0 90 90 a0 90 90 90 
80|01234256565656565656565656565656565678989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898|-48Pzs.L 1jVxs.n 1EX20.o 8wX0 iA0 Qlb0 52O0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cm0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|30e4","America/Puerto_Rico|LMT AST AWT APT|4o.p 40 30 30|01231|-2Qi7z.z 1IUbz.z 7XT0 iu0|24e5","America/Araguaina|LMT -03 -02|3c.M 30 20|0121212121212121212121212121212121212121212121212121|-2glwL.c HdKL.c 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 dMN0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 ny10 Lz0|14e4","America/Argentina/Buenos_Aires|LMT CMT -04 -03 -02|3R.M 4g.M 40 30 20|012323232323232323232323232323232323232323234343434343434343|-331U6.c 125cn pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wp0 Rb0 1wp0 TX0 A4p0 uL0 1qN0 WL0|","America/Argentina/Catamarca|LMT CMT -04 -03 -02|4n.8 4g.M 40 30 20|012323232323232323232323232323232323232323234343434243432343|-331TA.Q 125bR.E pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 
1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 rlB0 7B0 8zb0 uL0|","America/Argentina/Cordoba|LMT CMT -04 -03 -02|4g.M 4g.M 40 30 20|012323232323232323232323232323232323232323234343434243434343|-331TH.c 125c0 pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 A4p0 uL0 1qN0 WL0|","America/Argentina/Jujuy|LMT CMT -04 -03 -02|4l.c 4g.M 40 30 20|0123232323232323232323232323232323232323232343434232434343|-331TC.M 125bT.A pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1ze0 TX0 1ld0 WK0 1wp0 TX0 A4p0 uL0|","America/Argentina/La_Rioja|LMT CMT -04 -03 -02|4r.o 4g.M 40 30 20|0123232323232323232323232323232323232323232343434342343432343|-331Tw.A 125bN.o pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Qn0 qO0 16n0 Rb0 1wp0 TX0 rlB0 7B0 8zb0 uL0|","America/Argentina/Mendoza|LMT CMT -04 -03 -02|4z.g 4g.M 40 30 20|012323232323232323232323232323232323232323234343423232432343|-331To.I 125bF.w pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1u20 SL0 1vd0 Tb0 1wp0 TW0 ri10 Op0 7TX0 uL0|","America/Argentina/Rio_Gallegos|LMT CMT -04 -03 -02|4A.Q 4g.M 40 30 20|012323232323232323232323232323232323232323234343434343432343|-331Tn.8 125bD.U pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wp0 Rb0 1wp0 TX0 rlB0 7B0 8zb0 uL0|","America/Argentina/Salta|LMT CMT -04 -03 -02|4l.E 4g.M 40 30 20|0123232323232323232323232323232323232323232343434342434343|-331TC.k 125bT.8 pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 A4p0 uL0|","America/Argentina/San_Juan|LMT CMT -04 -03 -02|4y.4 4g.M 40 30 20|0123232323232323232323232323232323232323232343434342343432343|-331Tp.U 125bG.I pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Qn0 qO0 16n0 Rb0 1wp0 TX0 rld0 m10 8lb0 uL0|","America/Argentina/San_Luis|LMT CMT -04 -03 -02|4p.o 4g.M 40 30 20|0123232323232323232323232323232323232323232343434232323432323|-331Ty.A 125bP.o pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 XX0 1q20 SL0 AN0 vDb0 m10 8lb0 8L0 jd0 1qN0 WL0 1qN0|","America/Argentina/Tucuman|LMT CMT -04 -03 -02|4k.Q 4g.M 40 30 20|01232323232323232323232323232323232323232323434343424343234343|-331TD.8 125bT.U pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 
1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wq0 Ra0 1wp0 TX0 rlB0 4N0 8BX0 uL0 1qN0 WL0|","America/Argentina/Ushuaia|LMT CMT -04 -03 -02|4x.c 4g.M 40 30 20|012323232323232323232323232323232323232323234343434343432343|-331Tq.M 125bH.A pKnH.c Mn0 1iN0 Tb0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 1C10 LX0 1C10 LX0 1C10 LX0 1C10 Mn0 MN0 2jz0 MN0 4lX0 u10 5Lb0 1pB0 Fnz0 u10 uL0 1vd0 SL0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 zvd0 Bz0 1tB0 TX0 1wp0 Rb0 1wp0 Rb0 1wp0 TX0 rkN0 8p0 8zb0 uL0|","America/Asuncion|LMT AMT -04 -03|3O.E 3O.E 40 30|0123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323|-3eLw9.k 1FGo0 1DKM9.k 3CL0 3Dd0 10L0 1pB0 10n0 1pB0 10n0 1pB0 1cL0 1dd0 1db0 1dd0 1cL0 1dd0 1cL0 1dd0 1cL0 1dd0 1db0 1dd0 1cL0 1dd0 1cL0 1dd0 1cL0 1dd0 1db0 1dd0 1cL0 1lB0 14n0 1dd0 1cL0 1fd0 WL0 1rd0 1aL0 1dB0 Xz0 1qp0 Xb0 1qN0 10L0 1rB0 TX0 1tB0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 WN0 1qL0 11B0 1nX0 1ip0 WL0 1qN0 WL0 1qN0 WL0 1tB0 TX0 1tB0 TX0 1tB0 19X0 1a10 1fz0 1a10 1fz0 1cN0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 
19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0|28e5","America/Panama|LMT CMT EST|5i.8 5j.A 50|012|-3eLuF.Q Iy01.s|15e5","America/Bahia_Banderas|LMT MST CST MDT PST CDT|71 
70 60 60 80 50|0121312141313131313131313131313131313152525252525252525252525252|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 otX0 gmN0 P2N0 13Vd0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nW0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0|84e3","America/Bahia|LMT -03 -02|2y.4 30 20|01212121212121212121212121212121212121212121212121212121212121|-2glxp.U HdLp.U 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 l5B0 Rb0|27e5","America/Barbados|LMT AST ADT -0330|3W.t 40 30 3u|0121213121212121|-2m4k1.v 1eAN1.v RB0 1Bz0 Op0 1rb0 11d0 1jJc0 IL0 1ip0 17b0 1ip0 17b0 1ld0 13b0|28e4","America/Belem|LMT -03 -02|3d.U 30 20|012121212121212121212121212121|-2glwK.4 HdKK.4 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0|20e5","America/Belize|LMT CST -0530 CWT CPT CDT|5Q.M 60 5u 50 50 50|012121212121212121212121212121212121212121212121213412121212121212121212121212121212121212121215151|-2kBu7.c fPA7.c Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu Rcu 7Bt0 Ni0 4nd0 Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1wou Rbu 1wou Rbu 1zcu Onu e9Au qn0 lxB0 mn0|57e3","America/Boa_Vista|LMT -04 -03|42.E 40 30|0121212121212121212121212121212121|-2glvV.k HdKV.k 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 smp0 WL0 1tB0 2L0|62e2","America/Bogota|LMT BMT -05 -04|4U.g 4U.g 50 40|01232|-3sTv3.I 1eIo0 38yo3.I 1PX0|90e5","America/Boise|LMT PST PDT MST MWT MPT MDT|7I.N 80 70 70 60 60 60|01212134536363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363|-3tFE0 1nEe0 1nX0 11B0 1nX0 8C10 JCL0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 Dd0 1Kn0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 
14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|21e4","America/Cambridge_Bay|-00 MST MWT MPT MDT CST CDT EST|0 70 60 60 60 60 50 50|012314141414141414141414141414141414141414141414141414141414567541414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141414141|-21Jc0 RO90 8x20 ix0 14HB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11A0 1nX0 2K0 WQ0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|15e2","America/Campo_Grande|LMT -04 -03|3C.s 40 
30|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2glwl.w HdLl.w 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 1C10 Lz0 1Ip0 HX0 1zd0 On0 1HB0 IL0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1HB0 FX0|77e4","America/Cancun|LMT CST EST EDT CDT|5L.4 60 50 40 50|0123232341414141414141414141414141414141412|-1UQG0 2q2o0 yLB0 1lb0 14p0 1lb0 14p0 Lz0 xB0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 Dd0|63e4","America/Caracas|LMT CMT -0430 -04|4r.I 4r.E 4u 40|012323|-3eLvw.g ROnX.U 28KM2.k 1IwOu kqo0|29e5","America/Cayenne|LMT -04 -03|3t.k 40 30|012|-2mrwu.E 2gWou.E|58e3","America/Chicago|LMT CST CDT EST CWT CPT|5O.A 60 50 50 50 50|012121212121212121212121212121212121213121212121214512121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tFG0 1nEe0 1nX0 11B0 1nX0 1wp0 TX0 WN0 1qL0 1cN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 11B0 1Hz0 14p0 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 RB0 8x30 iw0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|92e5","America/Chihuahua|LMT MST CST MDT CDT|74.k 70 60 60 50|0121312424231313131313131313131313131313131313131313131313132|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 2zQN0 1lb0 14p0 1lb0 14q0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0|81e4","America/Ciudad_Juarez|LMT MST CST MDT CDT|75.U 70 60 60 50|0121312424231313131313131313131313131313131313131313131313132131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131213131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 2zQN0 1lb0 14p0 1lb0 14q0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1wn0 cm0 EP0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 .1 9xX.X EP0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Costa_Rica|LMT SJMT CST CDT|5A.d 5A.d 60 50|01232323232|-3eLun.L 1fyo0 2lu0n.L Db0 1Kp0 Db0 pRB0 15b0 1kp0 mL0|12e5","America/Phoenix|LMT MST MDT MWT|7s.i 70 60 60|012121313121|-3tFF0 1nEe0 1nX0 11B0 1nX0 SgN0 4Al1 Ap0 1db0 SWqX 1cL0|42e5","America/Cuiaba|LMT -04 -03|3I.k 40 30|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2glwf.E HdLf.E 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 
Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 4a10 HX0 1zd0 On0 1HB0 IL0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1HB0 FX0|54e4","America/Danmarkshavn|LMT -03 -02 GMT|1e.E 30 20 0|01212121212121212121212121212121213|-2a5WJ.k 2z5fJ.k 19U0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 DC0|8","America/Dawson_Creek|LMT PST PDT PWT PPT MST|80.U 80 70 70 70 70|01213412121212121212121212121212121212121212121212121212125|-3tofX.4 1nspX.4 1in0 UGp0 8x10 iy0 3NB0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 ML0|12e3","America/Dawson|LMT YST YDT YWT YPT YDDT PST PDT MST|9h.E 90 80 80 80 70 80 70 70|0121213415167676767676767676767676767676767676767676767676767676767676767676767676767676767678|-2MSeG.k GWpG.k 1in0 1o10 13V0 Ser0 8x00 iz0 LCL0 1fA0 jrA0 fNd0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1z90|13e2","America/Denver|LMT MST MDT MWT MPT|6X.U 70 60 60 60|012121212134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tFF0 1nEe0 1nX0 11B0 1nX0 11B0 1qL0 WN0 mn0 Ord0 8x20 ix0 LCN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|26e5","America/Detroit|LMT CST EST EWT EPT EDT|5w.b 60 50 40 40 40|0123425252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252|-2Cgir.N peqr.N 156L0 8x40 iv0 6fd0 11z0 JxX1 SMX 1cN0 1cL0 aW10 1cL0 s10 1Vz0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|37e5","America/Edmonton|LMT MST MDT MWT MPT|7x.Q 70 60 60 
60|0121212121212134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2yd4q.8 shdq.8 1in0 17d0 hz0 2dB0 1fz0 1a10 11z0 1qN0 WL0 1qN0 11z0 IGN0 8x20 ix0 3NB0 11z0 XQp0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|10e5","America/Eirunepe|LMT -05 -04|4D.s 50 40|0121212121212121212121212121212121|-2glvk.w HdLk.w 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 dPB0 On0 yTd0 d5X0|31e3","America/El_Salvador|LMT CST CDT|5U.M 60 50|012121|-1XiG3.c 2Fvc3.c WL0 1qN0 WL0|11e5","America/Tijuana|LMT MST PST PDT PWT PPT|7M.4 70 80 70 70 
70|012123245232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1UQF0 4Q00 8mM0 8lc0 SN0 1cL0 pHB0 83r0 zI0 5O10 1Rz0 cOO0 11A0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 BUp0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|20e5","America/Fort_Nelson|LMT PST PDT PWT PPT MST|8a.L 80 70 70 70 70|012134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121215|-3tofN.d 1nspN.d 1in0 UGp0 8x10 iy0 3NB0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0|39e2","America/Fort_Wayne|LMT CST CDT CWT CPT EST EDT|5I.C 60 50 50 50 50 
40|0121212134121212121212121212151565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 QI10 Db0 RB0 8x30 iw0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 5Tz0 1o10 qLb0 1cL0 1cN0 1cL0 1qhd0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Fortaleza|LMT -03 -02|2y 30 20|0121212121212121212121212121212121212121|-2glxq HdLq 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 nsp0 WL0 1tB0 5z0 2mN0 On0|34e5","America/Glace_Bay|LMT AST ADT AWT APT|3X.M 40 30 30 30|012134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2IsI0.c CwO0.c 1in0 UGp0 8x50 iu0 iq10 11z0 Jg10 1cL0 1cN0 1cL0 
1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|19e3","America/Godthab|LMT -03 -02 -01|3q.U 30 20 10|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-2a5Ux.4 2z5dx.4 19U0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 2so0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 
1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|17e3","America/Goose_Bay|LMT NST NDT NST NDT NWT NPT AST ADT ADDT|41.E 3u.Q 2u.Q 3u 2u 2u 2u 40 30 20|0121343434343434356343434343434343434343434343434343434343437878787878787878787878787878787878787878787879787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787|-3tojW.k 1nspt.c 1in0 DXb0 2HbX.8 WL0 1qN0 WL0 1qN0 WL0 1tB0 TX0 1tB0 WL0 1qN0 WL0 1qN0 7UHu itu 1tB0 WL0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1tB0 WL0 1ld0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 S10 g0u 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14n1 1lb0 14p0 1nW0 11C0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zcX Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|76e2","America/Grand_Turk|LMT KMT EST EDT AST|4I.w 57.a 50 40 
40|01232323232323232323232323232323232323232323232323232323232323232323232323243232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3eLvf.s RK0m.C 2HHBQ.O 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 7jA0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|37e2","America/Guatemala|LMT CST CDT|62.4 60 50|0121212121|-24KhV.U 2efXV.U An0 mtd0 Nz0 ifB0 17b0 zDB0 11z0|13e5","America/Guayaquil|LMT QMT -05 -04|5j.k 5e 50 40|01232|-3eLuE.E 1DNzS.E 2uILK rz0|27e5","America/Guyana|LMT -04 -0345 -03|3Q.D 40 3J 30|01231|-2mf87.l 8Hc7.l 2r7bJ Ey0f|80e4","America/Halifax|LMT AST ADT AWT APT|4e.o 40 30 30 
30|0121212121212121212121212121212121212121212121212134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2IsHJ.A xzzJ.A 1db0 3I30 1in0 3HX0 IL0 1E10 ML0 1yN0 Pb0 1Bd0 Mn0 1Bd0 Rz0 1w10 Xb0 1w10 LX0 1w10 Xb0 1w10 Lz0 1C10 Jz0 1E10 OL0 1yN0 Un0 1qp0 Xb0 1qp0 11X0 1w10 Lz0 1HB0 LX0 1C10 FX0 1w10 Xb0 1qp0 Xb0 1BB0 LX0 1td0 Xb0 1qp0 Xb0 Rf0 8x50 iu0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 3Qp0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 3Qp0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 6i10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|39e4","America/Havana|LMT HMT CST CDT|5t.s 5t.A 50 
40|0123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3eLuu.w 1qx00.8 72zu.o ML0 sld0 An0 1Nd0 Db0 1Nd0 An0 6Ep0 An0 1Nd0 An0 JDd0 Mn0 1Ap0 On0 1fd0 11X0 1qN0 WL0 1wp0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 14n0 1ld0 14L0 1kN0 15b0 1kp0 1cL0 1cN0 1fz0 1a10 1fz0 1fB0 11z0 14p0 1nX0 11B0 1nX0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 1a10 1in0 1a10 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 17c0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 11A0 6i00 Rc0 1wo0 U00 1tA0 Rc0 1wo0 U00 1wo0 U00 1zc0 U00 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 
Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0|21e5","America/Hermosillo|LMT MST CST MDT PST|7n.Q 70 60 60 80|0121312141313131|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 otX0 gmN0 P2N0 13Vd0 1lb0 14p0 1lb0 14p0 1lb0|64e4","America/Indiana/Knox|LMT CST CDT CWT CPT EST|5K.u 60 50 50 50 
50|01212134121212121212121212121212121212151212121212121212121212121212121212121212121212121252121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 3NB0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 11z0 1o10 11z0 1o10 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 3Cn0 8wp0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 z8o0 1o00 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Indiana/Marengo|LMT CST CDT CWT CPT EST EDT|5J.n 60 50 50 50 50 40|01212134121212121212121215656565656525656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 
11B0 1nX0 SgN0 8x30 iw0 dyN0 11z0 6fd0 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 jrz0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1VA0 LA0 1BX0 1e6p0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Indiana/Petersburg|LMT CST CDT CWT CPT EST EDT|5N.7 60 50 50 50 50 40|012121341212121212121212121215121212121212121212121252125656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 njX0 WN0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 3Fb0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 19co0 1o00 Rd0 1zb0 Oo0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Indiana/Tell_City|LMT CST CDT CWT CPT EST EDT|5L.3 60 50 50 50 50 
40|012121341212121212121212121512165652121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 njX0 WN0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 8wn0 1cN0 1cL0 1cN0 1cK0 1cN0 1cL0 1qhd0 1o00 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Indiana/Vevay|LMT CST CDT CWT CPT EST EDT|5E.g 60 50 50 50 50 40|0121213415656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 kPB0 Awn0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1lnd0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Indiana/Vincennes|LMT CST CDT CWT CPT EST EDT|5O.7 60 50 50 50 50 40|012121341212121212121212121212121565652125656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 g0p0 11z0 1o10 11z0 1qL0 WN0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 caL0 1cL0 1cN0 1cL0 1qhd0 1o00 Rd0 1zb0 Oo0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Indiana/Winamac|LMT CST CDT CWT CPT EST EDT|5K.p 60 50 50 50 50 
40|012121341212121212121212121212121212121565652165656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 jrz0 1cL0 1cN0 1cL0 1qhd0 1o00 Rd0 1za0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Inuvik|-00 PST PDT MDT MST|0 80 70 60 70|01212121212121213434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-FnA0 L3K0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cK0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 
11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|35e2","America/Iqaluit|-00 EWT EPT EST EDT CST CDT|0 40 40 50 40 60 50|0123434343434343434343434343434343434343434343434343434343456343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-16K00 7nX0 iv0 14HB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11C0 1nX0 11A0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|67e2","America/Jamaica|LMT KMT EST EDT|57.a 57.a 50 40|01232323232323232323232|-3eLuQ.O RK00 2uM1Q.O 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0|94e4","America/Juneau|LMT LMT PST 
PWT PPT PDT YDT YST AKST AKDT|-f2.j 8V.F 80 70 70 70 80 90 90 80|0123425252525252525252525252625252578989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898|-48Pzs.L 1jVwq.s 1EX12.j 8x10 iy0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cM0 1cM0 1cL0 1cN0 1fz0 1a10 1fz0 co0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|33e3","America/Kentucky/Louisville|LMT CST CDT CWT CPT EST EDT|5H.2 60 50 50 50 50 
40|01212121213412121212121212121212121212565656565656525656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 3Fd0 Nb0 LPd0 11z0 RB0 8x30 iw0 1nX1 e0X 9vd0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 xz0 gso0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1VA0 LA0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Kentucky/Monticello|LMT CST CDT CWT CPT EST EDT|5D.o 60 50 50 50 50 
40|01212134121212121212121212121212121212121212121212121212121212121212121212565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFG0 1nEe0 1nX0 11B0 1nX0 SgN0 8x30 iw0 SWp0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11A0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/La_Paz|LMT CMT BST -04|4w.A 4w.A 3w.A 40|0123|-3eLvr.o 1FIo0 13b0|19e5","America/Lima|LMT LMT -05 -04|58.c 58.A 50 40|01232323232323232|-3eLuP.M JcM0.o 1bDzP.o zX0 1aN0 1cL0 1cN0 1cL0 1PrB0 zX0 1O10 zX0 6Gp0 zX0 98p0 zX0|11e6","America/Los_Angeles|LMT PST PDT PWT PPT|7Q.W 80 70 70 
70|0121213412121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tFE0 1nEe0 1nX0 11B0 1nX0 SgN0 8x10 iy0 5Wp1 1VaX 3dA0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1a00 1fA0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|15e6","America/Maceio|LMT -03 -02|2m.Q 30 20|012121212121212121212121212121212121212121|-2glxB.8 HdLB.8 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 dMN0 Lz0 8Q10 WL0 1tB0 5z0 2mN0 On0|93e4","America/Managua|LMT MMT CST EST CDT|5J.8 5J.c 60 50 50|01232424232324242|-3eLue.Q 1Mhc0.4 1yAMe.M 4mn0 9Up0 Dz0 1K10 Dz0 s3F0 1KH0 DB0 9In0 k8p0 19X0 1o30 11y0|22e5","America/Manaus|LMT -04 -03|40.4 40 30|01212121212121212121212121212121|-2glvX.U HdKX.U 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 dPB0 On0|19e5","America/Martinique|LMT FFMT AST ADT|44.k 44.k 40 30|01232|-3eLvT.E PTA0 2LPbT.E 19X0|39e4","America/Matamoros|LMT CST CDT|6u 60 
50|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1UQG0 2FjC0 1nX0 i6p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|45e4","America/Mazatlan|LMT MST CST MDT PST|75.E 70 60 60 80|0121312141313131313131313131313131313131313131313131313131313131|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 otX0 gmN0 P2N0 13Vd0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0|44e4","America/Menominee|LMT CST CDT CWT CPT EST|5O.r 60 50 50 50 
50|012121341212152121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3pdG9.x 1jce9.x 1nX0 11B0 1nX0 SgN0 8x30 iw0 1o10 11z0 LCN0 1fz0 6410 9Jb0 1cM0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|85e2","America/Merida|LMT CST EST CDT|5W.s 60 50 50|0121313131313131313131313131313131313131313131313131313131|-1UQG0 2q2o0 2hz0 wu30 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0|11e5","America/Metlakatla|LMT LMT PST PWT PPT PDT AKST AKDT|-fd.G 8K.i 80 70 70 70 90 
80|0123425252525252525252525252525252526767672676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-48Pzs.L 1jVwf.5 1EX1d.G 8x10 iy0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1hU10 Rd0 1zb0 Op0 1zb0 Op0 1zb0 uM0 jB0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|14e2","America/Mexico_City|LMT MST CST MDT CDT CWT|6A.A 70 60 60 50 50|012131242425242424242424242424242424242424242424242424242424242424242|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 gEn0 TX0 3xd0 Jb0 6zB0 SL0 e5d0 17b0 1Pff0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0|20e6","America/Miquelon|LMT AST -03 -02|3I.E 40 30 
20|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-2mKkf.k 2LTAf.k gQ10 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|61e2","America/Moncton|LMT EST AST ADT AWT APT|4j.8 50 40 30 30 30|0123232323232323232323245232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3txvE.Q J4ME.Q CwN0 1in0 zAo0 An0 1Nd0 An0 1Nd0 An0 1Nd0 An0 1Nd0 An0 1Nd0 An0 1K10 Lz0 1zB0 NX0 1u10 Wn0 S20 8x50 iu0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 
1qN0 11z0 1o10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 3Cp0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14n1 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 ReX 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|64e3","America/Monterrey|LMT CST CDT|6F.g 60 50|0121212121212121212121212121212121212121212121212121212121|-1UQG0 2FjC0 1nX0 i6p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0|41e5","America/Montevideo|LMT MMT -04 -03 -0330 -0230 -02 -0130|3I.P 3I.P 40 30 3u 2u 20 1u|012343434343434343434343435353636353636375363636363636363636363636363636363636363636363|-2tRUf.9 sVc0 8jcf.9 1db0 1dcu 1cLu 1dcu 1cLu ircu 11zu 1o0u 11zu 1o0u 11zu 1o0u 11zu 1qMu WLu 1qMu WLu 1fAu 1cLu 1o0u 11zu NAu 3jXu zXu Dq0u 19Xu pcu jz0 cm10 19X0 6tB0 1fbu 3o0u jX0 4vB0 xz0 3Cp0 mmu 1a10 IMu Db0 4c10 uL0 1Nd0 An0 1SN0 uL0 mp0 28L0 iPB0 un0 1SN0 xz0 1zd0 Lz0 1zd0 Rb0 1zd0 On0 1wp0 Rb0 s8p0 1fB0 1ip0 11z0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 11z0|17e5","America/Toronto|LMT EST EDT EWT EPT|5h.w 50 40 40 
40|012121212121212121212121212121212121212121212123412121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-32B6G.s UFdG.s 1in0 11Wu 1nzu 1fD0 WJ0 1wr0 Nb0 1Ap0 On0 1zd0 On0 1wp0 TX0 1tB0 TX0 1tB0 TX0 1tB0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 4kM0 8x40 iv0 1o10 11z0 1nX0 11z0 1o10 11z0 1o10 1qL0 11D0 1nX0 11B0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|65e5","America/New_York|LMT EST EDT EWT EPT|4U.2 50 40 40 
40|012121212121212121212121212121212121212121212121213412121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tFH0 1nEe0 1nX0 11B0 1nX0 11B0 1qL0 1a10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 RB0 8x40 iv0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|21e6","America/Nome|LMT LMT NST NWT NPT BST BDT YST AKST AKDT|-cW.m b1.C b0 a0 a0 b0 a0 90 90 
80|01234256565656565656565656565656565678989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898989898|-48Pzs.L 1jVyu.p 1EX1W.m 8wW0 iB0 Qlb0 52O0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cl0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|38e2","America/Noronha|LMT -02 -01|29.E 20 10|0121212121212121212121212121212121212121|-2glxO.k HdKO.k 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 nsp0 WL0 1tB0 2L0 2pB0 On0|30e2","America/North_Dakota/Beulah|LMT MST MDT MWT MPT CST CDT|6L.7 70 60 60 60 60 
50|0121213412121212121212121212121212121212121212121212121212121212121212121212121212121212121212125656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFF0 1nEe0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Oo0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/North_Dakota/Center|LMT MST MDT MWT MPT CST CDT|6J.c 70 60 60 60 60 
50|0121213412121212121212121212121212121212121212121212121212125656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFF0 1nEe0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14o0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/North_Dakota/New_Salem|LMT MST MDT MWT MPT CST CDT|6J.D 70 60 60 60 60 
50|0121213412121212121212121212121212121212121212121212121212121212121212121212121212565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tFF0 1nEe0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14o0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","America/Ojinaga|LMT MST CST MDT CDT|6V.E 70 60 60 50|0121312424231313131313131313131313131313131313131313131313132424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242|-1UQF0 deL0 8lc0 17c0 10M0 1dd0 2zQN0 1lb0 14p0 1lb0 14q0 1lb0 14p0 1nX0 11B0 
1nX0 1fB0 WL0 1fB0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 U10 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1wn0 Rc0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|23e3","America/Paramaribo|LMT PMT PMT -0330 -03|3E.E 3E.Q 3E.A 3u 30|01234|-2nDUj.k Wqo0.c qanX.I 1yVXN.o|24e4","America/Port-au-Prince|LMT PPMT EST EDT|4N.k 4N 50 40|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3eLva.E 15RLX.E 2FnMb 19X0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14q0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 i6n0 1nX0 11B0 1nX0 d430 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 3iN0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|23e5","America/Rio_Branco|LMT -05 -04|4v.c 50 40|01212121212121212121212121212121|-2glvs.M HdLs.M 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 NBd0 d5X0|31e4","America/Porto_Velho|LMT -04 -03|4f.A 40 30|012121212121212121212121212121|-2glvI.o HdKI.o 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 
zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0|37e4","America/Punta_Arenas|LMT SMT -05 -04 -03|4H.E 4G.J 50 40 30|01213132323232323232343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-3eLvg.k MJbX.5 fJAh.f 5knG.J 1Vzh.f jRAG.J 1pbh.f 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 nHX0 op0 blz0 ko0 Qeo0 WL0 1zd0 On0 1ip0 11z0 1o10 11z0 1qN0 WL0 1ld0 14n0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 46n0 Ap0|","America/Winnipeg|LMT CST CDT CWT CPT|6s.A 60 50 50 50|0121212134121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3kLtv.o 1a3bv.o WL0 3ND0 1in0 Jap0 Rb0 aCN0 8x30 iw0 1tB0 11z0 1ip0 11z0 1o10 11z0 1o10 11z0 1rd0 10L0 1op0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 1cL0 1cN0 11z0 6i10 WL0 6i10 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1a00 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1a00 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1o00 14o0 1lc0 14o0 1lc0 14o0 1o00 11A0 1o00 11A0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|66e4","America/Rankin_Inlet|-00 CST CDT EST|0 60 50 
50|01212121212121212121212121212121212121212121212121212121212321212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-vDc0 Bjk0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|26e2","America/Recife|LMT -03 -02|2j.A 30 20|0121212121212121212121212121212121212121|-2glxE.o HdLE.o 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 nsp0 WL0 1tB0 2L0 2pB0 On0|33e5","America/Regina|LMT MST MDT MWT MPT CST|6W.A 70 60 60 60 60|012121212121212121212121341212121212121212121212121215|-2AD51.o uHe1.o 1in0 s2L0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 66N0 1cL0 1cN0 19X0 1fB0 1cL0 1fB0 1cL0 1cN0 1cL0 M30 8x20 ix0 1ip0 1cL0 1ip0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 3NB0 1cL0 1cN0|19e4","America/Resolute|-00 CST CDT EST|0 60 50 
50|01212121212121212121212121212121212121212121212121212121212321212121212321212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-SnA0 103I0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|229","America/Santarem|LMT -04 -03|3C.M 40 30|0121212121212121212121212121212|-2glwl.c HdLl.c 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 qe10 xb0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 NBd0|21e4","America/Santiago|LMT SMT -05 -04 -03|4G.J 4G.J 50 40 
30|0121313232323232323432343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-3eLvh.f MJc0 fJAh.f 5knG.J 1Vzh.f jRAG.J 1pbh.f 11d0 1oL0 11d0 1oL0 11d0 1oL0 11d0 1pb0 11d0 nHX0 op0 9Bz0 hX0 1q10 ko0 Qeo0 WL0 1zd0 On0 1ip0 11z0 1o10 11z0 1qN0 WL0 1ld0 14n0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 46n0 Ap0 1Nb0 Ap0 1Nb0 Ap0 1zb0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 
11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0|62e5","America/Santo_Domingo|LMT SDMT EST EDT -0430 AST|4D.A 4E 50 40 4u 40|012324242424242525|-3eLvk.o 1Jic0.o 1lJMk Mn0 6sp0 Lbu 1Cou yLu 1RAu wLu 1QMu xzu 1Q0u xXu 1PAu 13jB0 e00|29e5","America/Sao_Paulo|LMT -03 -02|36.s 30 20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2glwR.w HdKR.w 1cc0 1e10 1bX0 Ezd0 So0 1vA0 Mn0 1BB0 ML0 1BB0 zX0 pTd0 PX0 2ep0 nz0 1C10 zX0 1C10 LX0 1C10 Mn0 H210 Rb0 1tB0 IL0 1Fd0 FX0 1EN0 FX0 1HB0 Lz0 1EN0 Lz0 1C10 IL0 1HB0 Db0 1HB0 On0 1zd0 On0 1zd0 Lz0 1zd0 Rb0 1wN0 Wn0 1tB0 Rb0 1tB0 WL0 1tB0 Rb0 1zd0 On0 1HB0 FX0 1C10 Lz0 1Ip0 HX0 1zd0 On0 1HB0 IL0 1wp0 On0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 Rb0 
1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1HB0 FX0|20e6","America/Scoresbysund|LMT -02 -01 +00|1r.Q 20 10 0|012132323232323232323232323232323232323232323232323232323232323232323232323232323232323232121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2a5Ww.8 2z5ew.8 1a00 1cK0 1cL0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 2pA0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|452","America/Sitka|LMT LMT PST PWT PPT PDT YST AKST AKDT|-eW.L 91.d 80 70 70 70 90 90 
80|0123425252525252525252525252525252567878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787878787|-48Pzs.L 1jVwu 1EX0W.L 8x10 iy0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 co0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|90e2","America/St_Johns|LMT NST NDT NST NDT NWT NPT NDDT|3u.Q 3u.Q 2u.Q 3u 2u 2u 2u 
1u|012121212121212121212121212121212121213434343434343435634343434343434343434343434343434343434343434343434343434343434343434343434343434343437343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-3tokt.8 1l020 14L0 1nB0 1in0 1gm0 Dz0 1JB0 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 1cL0 1fB0 19X0 1fB0 19X0 10O0 eKX.8 19X0 1iq0 WL0 1qN0 WL0 1qN0 WL0 1tB0 TX0 1tB0 WL0 1qN0 WL0 1qN0 7UHu itu 1tB0 WL0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1tB0 WL0 1ld0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14n1 1lb0 14p0 1nW0 11C0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zcX Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 
1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|11e4","America/Swift_Current|LMT MST MDT MWT MPT CST|7b.k 70 60 60 60 60|012134121212121212121215|-2AD4M.E uHdM.E 1in0 UGp0 8x20 ix0 1o10 17b0 1ip0 11z0 1o10 11z0 1o10 11z0 isN0 1cL0 3Cp0 1cL0 1cN0 11z0 1qN0 WL0 pMp0|16e3","America/Tegucigalpa|LMT CST CDT|5M.Q 60 50|01212121|-1WGGb.8 2ETcb.8 WL0 1qN0 WL0 GRd0 AL0|11e5","America/Thule|LMT AST ADT|4z.8 40 
30|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2a5To.Q 31NBo.Q 1cL0 1cN0 1cL0 1fB0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|656","America/Vancouver|LMT PST PDT PWT PPT|8c.s 80 70 70 70|01213412121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3tofL.w 1nspL.w 1in0 UGp0 8x10 iy0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 
1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|23e5","America/Whitehorse|LMT YST YDT YWT YPT YDDT PST PDT MST|90.c 90 80 80 80 70 80 70 70|0121213415167676767676767676767676767676767676767676767676767676767676767676767676767676767678|-2MSeX.M GWpX.M 1in0 1o10 13V0 Ser0 8x00 iz0 LCL0 1fA0 LA0 ytd0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1z90|23e3","America/Yakutat|LMT LMT YST YWT YPT YDT AKST AKDT|-eF.5 9i.T 90 80 80 80 90 80|0123425252525252525252525252525252526767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676767676|-48Pzs.L 1jVwL.G 1EX1F.5 8x00 iz0 Vo10 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 cn0 10q0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|642","Antarctica/Casey|-00 +08 +11|0 -80 -b0|012121212121212121|-2q00 1DjS0 T90 40P0 KL0 blz0 3m10 1o30 14k0 1kr0 12l0 1o01 14kX 1lf1 14kX 1lf1 13bX|10","Antarctica/Davis|-00 +07 +05|0 -70 -50|01012121|-vyo0 iXt0 alj0 1D7v0 VB0 3Wn0 KN0|70","Pacific/Port_Moresby|LMT PMMT +10|-9M.E -9M.w -a0|012|-3D8VM.E AvA0.8|25e4","Antarctica/Macquarie|-00 AEST AEDT|0 -a0 -b0|0121012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-2OPc0 Fb40 1a00 4SK0 1ayy0 Lvs0 1cM0 1o00 Rc0 1wo0 Rc0 1wo0 U00 1wo0 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1qM0 WM0 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1wo0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 11A0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 11A0 1o00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1cM0 1cM0 3Co0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 
1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0|1","Antarctica/Mawson|-00 +06 +05|0 -60 -50|012|-CEo0 2fyk0|60","Pacific/Auckland|LMT NZMT NZST NZST NZDT|-bD.4 -bu -cu -c0 -d0|012131313131313131313131313134343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-46jLD.4 2nEO9.4 Lz0 1tB0 11zu 1o0u 11zu 1o0u 11zu 1o0u 14nu 1lcu 14nu 1lcu 1lbu 11Au 1nXu 11Au 1nXu 11Au 1nXu 11Au 1nXu 11Au 1qLu WMu 1qLu 11Au 1n1bu IM0 1C00 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1qM0 14o0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1io0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 
1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00|14e5","Antarctica/Palmer|-00 -03 -04 -02|0 30 40 
20|0121212121213121212121212121212121212121212121212121212121212121212121212121212121|-cao0 nD0 1vd0 SL0 1vd0 17z0 1cN0 1fz0 1cN0 1cL0 1cN0 asn0 Db0 jsN0 14N0 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 46n0 Ap0|40","Antarctica/Rothera|-00 -03|0 30|01|gOo0|130","Asia/Riyadh|LMT +03|-36.Q -30|01|-TvD6.Q|57e5","Antarctica/Troll|-00 +00 +02|0 0 -20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|1puo0 hd0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|40","Antarctica/Vostok|-00 +07 +05|0 -70 -50|01012|-tjA0 1rWh0 1Nj0 1aTv0|25","Europe/Berlin|LMT CET CEST CEMT|-R.s -10 -20 
-30|012121212121212321212321212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-36RcR.s UbWR.s 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 kL0 Nc0 m10 WM0 1ao0 1cp0 dX0 jz0 Dd0 1io0 17c0 1fA0 1a00 1ehA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|41e5","Asia/Almaty|LMT +05 +06 +07|-57.M -50 -60 -70|012323232323232323232321232323232323232323232323232|-1Pc57.M eUo7.M 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0|15e5","Asia/Amman|LMT EET EEST +03|-2n.I -20 -30 -30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212123|-1yW2n.I 1HiMn.I KL0 1oN0 11b0 1oN0 11b0 1pd0 1dz0 1cp0 11b0 1op0 11b0 fO10 1db0 1e10 1cL0 1cN0 1cL0 1cN0 1fz0 1pd0 10n0 1ld0 14n0 1hB0 15b0 1ip0 19X0 1cN0 1cL0 1cN0 17b0 1ld0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1So0 y00 1fc0 1dc0 1co0 1dc0 1cM0 1cM0 1cM0 1o00 11A0 1lc0 17c0 1cM0 1cM0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 4bX0 Dd0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 
11A0 1o00 11A0 1qM0 WM0 1qM0 LA0 1C00|25e5","Asia/Anadyr|LMT +12 +13 +14 +11|-bN.U -c0 -d0 -e0 -b0|01232121212121212121214121212121212121212121212121212121212141|-1PcbN.U eUnN.U 23CL0 1db0 2q10 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 2sp0 WM0|13e3","Asia/Aqtau|LMT +04 +05 +06|-3l.4 -40 -50 -60|012323232323232323232123232312121212121212121212|-1Pc3l.4 eUnl.4 24PX0 2pX0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0|15e4","Asia/Aqtobe|LMT +04 +05 +06|-3M.E -40 -50 -60|0123232323232323232321232323232323232323232323232|-1Pc3M.E eUnM.E 23CL0 3Db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0|27e4","Asia/Ashgabat|LMT +04 +05 +06|-3R.w -40 -50 -60|0123232323232323232323212|-1Pc3R.w eUnR.w 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0|41e4","Asia/Atyrau|LMT +03 +05 +06 +04|-3r.I -30 -50 -60 -40|01232323232323232323242323232323232324242424242|-1Pc3r.I eUor.I 24PW0 2pX0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 2sp0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0|","Asia/Baghdad|LMT BMT +03 +04|-2V.E -2V.A -30 -40|0123232323232323232323232323232323232323232323232323232|-3eLCV.E 18ao0.4 2ACnV.A 11b0 1cp0 1dz0 1dd0 1db0 1cN0 1cp0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1de0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0 1dc0 1dc0 1cM0 1dc0 1cM0 1dc0 1cM0 1dc0|66e5","Asia/Qatar|LMT +04 +03|-3q.8 -40 -30|012|-21Jfq.8 27BXq.8|96e4","Asia/Baku|LMT +03 +04 +05|-3j.o -30 -40 -50|01232323232323232323232123232323232323232323232323232323232323232|-1Pc3j.o 1jUoj.o WCL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 1cM0 9Je0 1o00 11z0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|27e5","Asia/Bangkok|LMT BMT +07|-6G.4 -6G.4 -70|012|-3D8SG.4 1C000|15e6","Asia/Barnaul|LMT +06 +07 +08|-5z -60 -70 -80|0123232323232323232323212323232321212121212121212121212121212121212|-21S5z pCnz 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 p90 LE0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3rd0|","Asia/Beirut|LMT EET EEST|-2m -20 
-30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3D8Om 1BWom 1on0 1410 1db0 19B0 1in0 1ip0 WL0 1lQp0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 11b0 q6N0 En0 1oN0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 11b0 1op0 11b0 dA10 17b0 1iN0 17b0 1iN0 17b0 1iN0 17b0 1vB0 SL0 1mp0 13z0 1iN0 17b0 1iN0 17b0 1jd0 12n0 1a10 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 
1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0|22e5","Asia/Bishkek|LMT +05 +06 +07|-4W.o -50 -60 -70|012323232323232323232321212121212121212121212121212|-1Pc4W.o eUnW.o 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2e00 1tX0 17b0 1ip0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1cPu 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0|87e4","Asia/Brunei|LMT +0730 +08 +0820 +09|-7l.k -7u -80 -8k -90|0123232323232323242|-1KITl.k gDbP.k 6ynu AnE 1O0k AnE 1NAk AnE 1NAk AnE 1NAk AnE 1O0k AnE 1NAk AnE pAk 8Fz0|42e4","Asia/Kolkata|LMT HMT MMT IST +0630|-5R.s -5R.k -5l.a -5u -6u|01234343|-4Fg5R.s BKo0.8 1rDcw.a 1r2LP.a 1un0 HB0 7zX0|15e6","Asia/Chita|LMT +08 +09 +10|-7x.Q -80 -90 -a0|012323232323232323232321232323232323232323232323232323232323232312|-21Q7x.Q pAnx.Q 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3re0|33e4","Asia/Choibalsan|LMT +07 +08 +10 +09|-7C -70 -80 -a0 -90|0123434343434343434343434343434343434343434343424242|-2APHC 2UkoC cKn0 1da0 1dd0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 6hD0 11z0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 3Db0 h1f0 1cJ0 1cP0 1cJ0|38e3","Asia/Shanghai|LMT CST CDT|-85.H -80 -90|012121212121212121212121212121|-2M0U5.H Iuo5.H 18n0 OjB0 Rz0 11d0 1wL0 A10 8HX0 1G10 Tz0 1ip0 1jX0 1cN0 11b0 1oN0 aL0 1tU30 Rb0 1o10 11z0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0|23e6","Asia/Colombo|LMT MMT +0530 +06 +0630|-5j.o -5j.w -5u -60 -6u|012342432|-3D8Rj.o 13inX.Q 1rFbN.w 1zzu 7Apu 23dz0 11zu n3cu|22e5","Asia/Dhaka|LMT HMT +0630 +0530 +06 +07|-61.E -5R.k -6u -5u -60 -70|01232454|-3eLG1.E 26008.k 1unn.k HB0 m6n0 2kxbu 1i00|16e6","Asia/Damascus|LMT EET EEST +03|-2p.c -20 -30 -30|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212123|-21Jep.c Hep.c 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1xRB0 11X0 1oN0 10L0 1pB0 11b0 1oN0 10L0 1mp0 13X0 1oN0 11b0 1pd0 11b0 1oN0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 11b0 1oN0 11b0 1oN0 11b0 1pd0 11b0 1oN0 Nb0 1AN0 Nb0 bcp0 19X0 1gp0 19X0 3ld0 1xX0 Vd0 1Bz0 Sp0 1vX0 10p0 1dz0 1cN0 1cL0 1db0 1db0 1g10 1an0 1ap0 1db0 1fd0 1db0 1cN0 1db0 1dd0 1db0 1cp0 1dz0 1c10 1dX0 1cN0 1db0 1dd0 1db0 1cN0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1db0 1cN0 1db0 1cN0 19z0 1fB0 1qL0 11B0 1on0 Wp0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0|26e5","Asia/Dili|LMT +08 +09|-8m.k -80 -90|01212|-2le8m.k 1dnXm.k 1nfA0 Xld0|19e4","Asia/Dubai|LMT +04|-3F.c -40|01|-21JfF.c|39e5","Asia/Dushanbe|LMT +05 +06 +07|-4z.c -50 -60 -70|012323232323232323232321|-1Pc4z.c eUnz.c 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2hB0|76e4","Asia/Famagusta|LMT EET EEST +03|-2f.M -20 -30 -30|0121212121212121212121212121212121212121212121212121212121212121212121212121212121212312121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1Vc2f.M 2a3cf.M 1cL0 1qp0 Xz0 19B0 19X0 1fB0 1db0 1cp0 1cL0 1fB0 19X0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1o30 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 
11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 15U0 2Ks0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|","Asia/Gaza|LMT EET EEST IST IDT|-2h.Q -20 -30 -20 -30|012121212121212121212121212121212123434343434343434343434343434343121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2MBCh.Q 1Azeh.Q MM0 iM0 4JA0 10o0 1pA0 10M0 1pA0 16o0 1jA0 16o0 1jA0 pBa0 Vz0 1oN0 11b0 1oO0 10N0 1pz0 10N0 1pb0 10N0 1pb0 10N0 1pb0 10N0 1pz0 10N0 1pb0 10N0 1pb0 11d0 1oL0 dW0 hfB0 Db0 1fB0 Rb0 bXB0 gM0 8Q00 IM0 1wo0 TX0 1HB0 IL0 1s10 10n0 1o10 WL0 1zd0 On0 1ld0 11z0 1o10 14n0 1o10 14n0 1nd0 12n0 1nd0 Xz0 1q10 12n0 M10 C00 17c0 1io0 17c0 1io0 17c0 1o00 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 17c0 1io0 18N0 1bz0 19z0 1gp0 1610 1iL0 11z0 1o10 14o0 1lA1 SKX 1xd1 MKX 1AN0 1a00 1fA0 1cL0 1cN0 1nX0 1210 1nA0 1210 1qL0 WN0 1qL0 WN0 1qL0 11c0 1on0 11B0 1o00 11A0 1qo0 XA0 1qp0 1cN0 1cL0 17d0 1in0 14p0 1lb0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1lb0 14p0 1in0 17d0 1cL0 1cN0 19X0 e10 2L0 WN0 14n0 gN0 5z0 11B0 WL0 e10 bb0 11B0 TX0 e10 dX0 11B0 On0 gN0 gL0 11B0 Lz0 e10 pb0 WN0 IL0 e10 rX0 WN0 Db0 gN0 uL0 11B0 xz0 e10 An0 11B0 rX0 gN0 Db0 11B0 pb0 e10 Lz0 WN0 mn0 e10 On0 WN0 gL0 gN0 Rb0 11B0 bb0 e10 WL0 11B0 5z0 gN0 11z0 11B0 2L0 gN0 14n0 1fB0 1cL0 1a10 1fz0 14p0 1lb0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 
1nX0 14p0 1in0 17d0 1fz0 1a10 19X0 1fB0 17b0 e10 5z0 WN0 14n0 e10 8n0 WN0 WL0 gN0 bb0 11B0 Rb0 e10 gL0 11B0 Lz0 gN0 jz0 11B0 IL0 gN0 pb0 WN0 FX0 e10 uL0 WN0 An0 gN0 xz0 11B0 uL0 e10 Db0 11B0 rX0 e10 FX0 11B0 mn0 gN0 IL0 11B0 jz0 e10 Rb0 WN0 dX0 gN0 TX0 WN0 bb0 gN0 WL0 11B0 5z0 e10 14n0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 
11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0|18e5","Asia/Hebron|LMT EET EEST IST IDT|-2k.n -20 -30 -20 -30|01212121212121212121212121212121212343434343434343434343434343434312121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2MBCk.n 1Azek.n MM0 iM0 4JA0 10o0 1pA0 10M0 1pA0 16o0 1jA0 16o0 1jA0 pBa0 Vz0 1oN0 11b0 1oO0 10N0 1pz0 10N0 1pb0 10N0 1pb0 10N0 1pb0 10N0 1pz0 10N0 1pb0 10N0 1pb0 11d0 1oL0 dW0 hfB0 Db0 1fB0 Rb0 bXB0 gM0 8Q00 IM0 1wo0 TX0 1HB0 IL0 1s10 10n0 1o10 WL0 1zd0 On0 1ld0 11z0 1o10 14n0 1o10 14n0 1nd0 12n0 1nd0 Xz0 1q10 12n0 M10 C00 17c0 1io0 17c0 1io0 17c0 1o00 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 17c0 1io0 18N0 1bz0 19z0 1gp0 1610 1iL0 12L0 1mN0 14o0 1lc0 Tb0 1xd1 MKX bB0 cn0 1cN0 1a00 1fA0 1cL0 1cN0 1nX0 1210 1nA0 1210 1qL0 WN0 1qL0 WN0 1qL0 11c0 1on0 11B0 1o00 11A0 1qo0 XA0 1qp0 1cN0 1cL0 17d0 1in0 14p0 1lb0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1lb0 14p0 1in0 17d0 1cL0 1cN0 19X0 e10 2L0 WN0 14n0 gN0 5z0 11B0 WL0 e10 bb0 11B0 TX0 e10 dX0 11B0 On0 gN0 gL0 11B0 Lz0 e10 pb0 WN0 IL0 e10 rX0 WN0 Db0 gN0 uL0 11B0 xz0 e10 An0 11B0 rX0 gN0 Db0 11B0 pb0 e10 Lz0 WN0 mn0 e10 On0 WN0 gL0 gN0 Rb0 11B0 bb0 e10 WL0 11B0 5z0 gN0 11z0 11B0 2L0 gN0 14n0 1fB0 1cL0 1a10 1fz0 14p0 1lb0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1nX0 14p0 1in0 17d0 1fz0 1a10 19X0 1fB0 17b0 e10 5z0 WN0 14n0 e10 8n0 WN0 WL0 gN0 bb0 11B0 Rb0 e10 gL0 11B0 Lz0 gN0 jz0 11B0 IL0 gN0 pb0 WN0 FX0 e10 uL0 WN0 An0 gN0 xz0 11B0 uL0 e10 Db0 11B0 rX0 e10 FX0 11B0 mn0 gN0 IL0 11B0 jz0 e10 Rb0 WN0 dX0 gN0 TX0 WN0 bb0 gN0 WL0 11B0 5z0 e10 14n0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 
11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 
1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0|25e4","Asia/Ho_Chi_Minh|LMT PLMT +07 +08 +09|-76.u -76.u -70 -80 -90|0123423232|-2yC76.u bK00 1h7b6.u 5lz0 18o0 3Oq0 k5b0 aW00 BAM0|90e5","Asia/Hong_Kong|LMT HKT HKST HKWT JST|-7A.G -80 -90 -8u -90|0123412121212121212121212121212121212121212121212121212121212121212121|-2CFH0 1taO0 Hc0 xUu 9tBu 11z0 1tDu Rc0 1wo0 11A0 1cM0 11A0 1o00 11A0 1o00 11A0 1o00 14o0 1o00 11A0 1nX0 U10 1tz0 U10 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 U10 1tz0 U10 1wn0 Rd0 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 17d0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 s10 1Vz0 1cN0 1cL0 1cN0 1cL0 6fd0 14n0|73e5","Asia/Hovd|LMT +06 +07 +08|-66.A -60 -70 -80|012323232323232323232323232323232323232323232323232|-2APG6.A 2Uko6.A cKn0 1db0 1dd0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 6hD0 11z0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 kEp0 1cJ0 1cP0 1cJ0|81e3","Asia/Irkutsk|LMT IMT +07 +08 +09|-6V.5 -6V.5 -70 -80 -90|012343434343434343434343234343434343434343434343434343434343434343|-3D8SV.5 1Bxc0 pjXV.5 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|60e4","Europe/Istanbul|LMT IMT EET EEST +03 +04|-1T.Q -1U.U -20 -30 -30 -40|01232323232323232323232323232323232323232323232345423232323232323232323232323232323232323232323232323232323232323234|-3D8NT.Q 1ePXW.U dzzU.U 11b0 8tB0 1on0 1410 1db0 19B0 1in0 3Rd0 Un0 1oN0 11b0 zSN0 CL0 mp0 1Vz0 1gN0 8yn0 1yp0 ML0 1kp0 17b0 1ip0 17b0 1fB0 19X0 1ip0 19X0 1ip0 17b0 qdB0 38L0 1jd0 Tz0 l6O0 11A0 WN0 1qL0 TB0 1tX0 U10 1tz0 11B0 1in0 17d0 z90 cne0 pb0 2Cp0 1800 14o0 1dc0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1a00 1fA0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WO0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 Xc0 1qo0 WM0 1qM0 11A0 1o00 1200 1nA0 11A0 1tA0 U00 15w0|13e6","Asia/Jakarta|LMT BMT +0720 +0730 +09 +08 WIB|-77.c -77.c -7k -7u -90 -80 -70|012343536|-49jH7.c 2hiLL.c luM0 mPzO 8vWu 6kpu 4PXu xhcu|31e6","Asia/Jayapura|LMT +09 +0930 WIT|-9m.M -90 -9u -90|0123|-1uu9m.M sMMm.M L4nu|26e4","Asia/Jerusalem|LMT JMT IST IDT IDDT|-2k.S -2k.E -20 -30 
-40|012323232323232432323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3D8Ok.S 1wvA0.e SyOk.E MM0 iM0 4JA0 10o0 1pA0 10M0 1pA0 16o0 1jA0 16o0 1jA0 3LA0 Eo0 oo0 1co0 1dA0 16o0 10M0 1jc0 1tA0 14o0 1cM0 1a00 11A0 1Nc0 Ao0 1Nc0 Ao0 1Ko0 LA0 1o00 WM0 EQK0 Db0 1fB0 Rb0 bXB0 gM0 8Q00 IM0 1wo0 TX0 1HB0 IL0 1s10 10n0 1o10 WL0 1zd0 On0 1ld0 11z0 1o10 14n0 1o10 14n0 1nd0 12n0 1nd0 Xz0 1q10 12n0 1hB0 1dX0 1ep0 1aL0 1eN0 17X0 1nf0 11z0 1tB0 19W0 1e10 17b0 1ep0 1gL0 18N0 1fz0 1eN0 17b0 1gq0 1gn0 19d0 1dz0 1c10 17X0 1hB0 1gn0 19d0 1dz0 1c10 17X0 1kp0 1dz0 1c10 1aL0 1eN0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 
1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0 10N0 1oL0|81e4","Asia/Kabul|LMT +04 +0430|-4A.M -40 -4u|012|-3eLEA.M 2dTcA.M|46e5","Asia/Kamchatka|LMT +11 +12 +13|-ay.A -b0 -c0 -d0|012323232323232323232321232323232323232323232323232323232323212|-1SLKy.A ivXy.A 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 2sp0 WM0|18e4","Asia/Karachi|LMT +0530 +0630 +05 PKT PKST|-4s.c -5u -6u -50 -50 -60|012134545454|-2xoss.c 1qOKW.c 7zX0 eup0 LqMu 1fy00 1cL0 dK10 11b0 1610 1jX0|24e6","Asia/Urumqi|LMT +06|-5O.k -60|01|-1GgtO.k|32e5","Asia/Kathmandu|LMT +0530 +0545|-5F.g -5u -5J|012|-21JhF.g 2EGMb.g|12e5","Asia/Khandyga|LMT +08 +09 +10 +11|-92.d -80 -90 -a0 
-b0|0123232323232323232323212323232323232323232323232343434343434343432|-21Q92.d pAp2.d 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 qK0 yN0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 17V0 7zD0|66e2","Asia/Krasnoyarsk|LMT +06 +07 +08|-6b.q -60 -70 -80|01232323232323232323232123232323232323232323232323232323232323232|-21Hib.q prAb.q 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|10e5","Asia/Kuala_Lumpur|LMT SMT +07 +0720 +0730 +09 +08|-6T.p -6T.p -70 -7k -7u -90 -80|01234546|-2M0ST.p aIM0 17anT.p l5XE 17bO 8Fyu 1so10|71e5","Asia/Macau|LMT CST +09 +10 CDT|-7y.a -80 -90 -a0 -90|012323214141414141414141414141414141414141414141414141414141414141414141|-2CFHy.a 1uqKy.a PX0 1kn0 15B0 11b0 4Qq0 1oM0 11c0 1ko0 1u00 11A0 1cM0 11c0 1o00 11A0 1o00 11A0 1oo0 1400 1o00 11A0 1o00 U00 1tA0 U00 1wo0 Rc0 1wru U10 1tz0 U10 1tz0 U10 1tz0 U10 1wn0 Rd0 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 17d0 1cK0 1cO0 1cK0 1cO0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 s10 1Vz0 1cN0 1cL0 1cN0 1cL0 6fd0 14n0|57e4","Asia/Magadan|LMT +10 +11 +12|-a3.c -a0 -b0 -c0|012323232323232323232321232323232323232323232323232323232323232312|-1Pca3.c eUo3.c 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3Cq0|95e3","Asia/Makassar|LMT MMT +08 +09 WITA|-7V.A -7V.A -80 -90 -80|01234|-21JjV.A vfc0 myLV.A 8ML0|15e5","Asia/Manila|LMT LMT PST PDT JST|fU -84 -80 -90 -90|01232423232|-54m84 2clc0 1vfc4 AL0 cK10 65X0 mXB0 vX0 VK10 1db0|24e6","Asia/Nicosia|LMT EET EEST|-2d.s -20 -30|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-1Vc2d.s 2a3cd.s 1cL0 1qp0 Xz0 19B0 19X0 1fB0 1db0 1cp0 1cL0 1fB0 19X0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1o30 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|32e4","Asia/Novokuznetsk|LMT +06 +07 +08|-5M.M -60 -70 -80|012323232323232323232321232323232323232323232323232323232323212|-1PctM.M eULM.M 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 2sp0 WM0|55e4","Asia/Novosibirsk|LMT +06 +07 +08|-5v.E -60 -70 -80|0123232323232323232323212323212121212121212121212121212121212121212|-21Qnv.E pAFv.E 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 ml0 Os0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 4eN0|15e5","Asia/Omsk|LMT +05 +06 +07|-4R.u -50 -60 -70|01232323232323232323232123232323232323232323232323232323232323232|-224sR.u pMLR.u 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|12e5","Asia/Oral|LMT +03 +05 +06 +04|-3p.o -30 -50 -60 -40|01232323232323232424242424242424242424242424242|-1Pc3p.o eUop.o 23CK0 3Db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 2pB0 1cM0 1fA0 1cM0 1cM0 IM0 1EM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0|27e4","Asia/Pontianak|LMT PMT +0730 +09 +08 WITA WIB|-7h.k -7h.k -7u -90 -80 -80 -70|012324256|-2ua7h.k XE00 munL.k 8Rau 6kpu 4PXu xhcu Wqnu|23e4","Asia/Pyongyang|LMT KST JST KST|-8n -8u -90 -90|012313|-2um8n 97XR 1lTzu 2Onc0 6BA0|29e5","Asia/Qostanay|LMT +04 +05 +06|-4e.s -40 -50 -60|012323232323232323232123232323232323232323232323|-1Pc4e.s eUoe.s 23CL0 3Db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0|","Asia/Qyzylorda|LMT +04 +05 +06|-4l.Q -40 -50 -60|01232323232323232323232323232323232323232323232|-1Pc4l.Q eUol.Q 23CL0 3Db0 
1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 3ao0 1EM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 zQl0|73e4","Asia/Rangoon|LMT RMT +0630 +09|-6o.L -6o.L -6u -90|01232|-3D8So.L 1BnA0 SmnS.L 7j9u|48e5","Asia/Sakhalin|LMT +09 +11 +12 +10|-9u.M -90 -b0 -c0 -a0|01232323232323232323232423232323232424242424242424242424242424242|-2AGVu.M 1BoMu.M 1qFa0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 2pB0 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3rd0|58e4","Asia/Samarkand|LMT +04 +05 +06|-4r.R -40 -50 -60|01232323232323232323232|-1Pc4r.R eUor.R 23CL0 3Db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0|36e4","Asia/Seoul|LMT KST JST KST KDT KDT|-8r.Q -8u -90 -90 -a0 -9u|012343434343151515151515134343|-2um8r.Q 97XV.Q 1m1zu 6CM0 Fz0 1kN0 14n0 1kN0 14L0 1zd0 On0 69B0 2I0u OL0 1FB0 Rb0 1qN0 TX0 1tB0 TX0 1tB0 TX0 1tB0 TX0 2ap0 12FBu 11A0 1o00 11A0|23e6","Asia/Srednekolymsk|LMT +10 +11 +12|-ae.Q -a0 -b0 -c0|01232323232323232323232123232323232323232323232323232323232323232|-1Pcae.Q eUoe.Q 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|35e2","Asia/Taipei|LMT CST JST CDT|-86 -80 -90 -90|012131313131313131313131313131313131313131|-30bk6 1FDc6 joM0 1yo0 Tz0 1ip0 1jX0 1cN0 11b0 1oN0 11b0 1oN0 11b0 1oN0 11b0 10N0 1BX0 10p0 1pz0 10p0 1pz0 10p0 1db0 1dd0 1db0 1cN0 1db0 1cN0 1db0 1cN0 1db0 1BB0 ML0 1Bd0 ML0 uq10 1db0 1cN0 1db0 97B0 AL0|74e5","Asia/Tashkent|LMT +05 +06 +07|-4B.b -50 -60 -70|012323232323232323232321|-1Pc4B.b eUnB.b 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0|23e5","Asia/Tbilisi|LMT TBMT +03 +04 +05|-2X.b -2X.b -30 -40 -50|01234343434343434343434323232343434343434343434323|-3D8OX.b 1LUM0 1jUnX.b WCL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 1cK0 1cL0 1cN0 1cL0 1cN0 2pz0 1cL0 1fB0 3Nz0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 An0 Os0 WM0|11e5","Asia/Tehran|LMT TMT +0330 +0430 +04 +05|-3p.I -3p.I -3u -4u -40 -50|012345423232323232323232323232323232323232323232323232323232323232323232|-2btDp.I Llc0 1FHaT.I 1pc0 120u Rc0 XA0 Wou JX0 1dB0 1en0 pNB0 UL0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 64p0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0|14e6","Asia/Thimphu|LMT +0530 +06|-5W.A -5u -60|012|-Su5W.A 1BGMs.A|79e3","Asia/Tokyo|LMT JST JDT|-9i.X -90 -a0|0121212121|-3jE90 2qSo0 Rc0 1lc0 14o0 1zc0 Oo0 1zc0 Oo0|38e6","Asia/Tomsk|LMT +06 +07 +08|-5D.P -60 -70 -80|0123232323232323232323212323232323232323232323212121212121212121212|-21NhD.P pxzD.P 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 co0 1bB0 11A0 1o00 11A0 1qM0 WM0 
1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3Qp0|10e5","Asia/Ulaanbaatar|LMT +07 +08 +09|-77.w -70 -80 -90|012323232323232323232323232323232323232323232323232|-2APH7.w 2Uko7.w cKn0 1db0 1dd0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 6hD0 11z0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 kEp0 1cJ0 1cP0 1cJ0|12e5","Asia/Ust-Nera|LMT +08 +09 +12 +11 +10|-9w.S -80 -90 -c0 -b0 -a0|012343434343434343434345434343434343434343434343434343434343434345|-21Q9w.S pApw.S 23CL0 1d90 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 17V0 7zD0|65e2","Asia/Vladivostok|LMT +09 +10 +11|-8L.v -90 -a0 -b0|01232323232323232323232123232323232323232323232323232323232323232|-1SJIL.v itXL.v 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|60e4","Asia/Yakutsk|LMT +08 +09 +10|-8C.W -80 -90 -a0|01232323232323232323232123232323232323232323232323232323232323232|-21Q8C.W pAoC.W 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|28e4","Asia/Yekaterinburg|LMT PMT +04 +05 +06|-42.x -3J.5 -40 -50 -60|012343434343434343434343234343434343434343434343434343434343434343|-2ag42.x 7mQh.s qBvJ.5 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|14e5","Asia/Yerevan|LMT +03 +04 +05|-2W -30 -40 -50|0123232323232323232323212121212323232323232323232323232323232|-1Pc2W 1jUnW WCL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 2pB0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 4RX0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0|13e5","Atlantic/Azores|LMT HMT -02 -01 +00 WET|1G.E 1S.w 20 10 0 
0|01232323232323232323232323232323232323232323234323432343234323232323232323232323232323232323232323232343434343434343434343434343434345434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-3tomh.k 18aoh.k aPX0 Sp0 LX0 1vc0 Tc0 1uM0 SM0 1vc0 Tc0 1vc0 SM0 1vc0 6600 1co0 3E00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 3I00 17c0 1cM0 1cM0 3Fc0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 1tA0 1cM0 1dc0 1400 gL0 IM0 s10 U00 dX0 Rc0 pd0 Rc0 gL0 Oo0 pd0 Rc0 gL0 Oo0 pd0 14o0 1cM0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 qIl0 1cM0 1fA0 1cM0 1cM0 1cN0 1cL0 1cN0 1cM0 1cM0 1cM0 1cM0 1cN0 1cL0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cL0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|25e4","Atlantic/Bermuda|LMT BMT BST AST ADT|4j.i 4j.i 3j.i 40 
30|0121213434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-3eLvE.G 16mo0 1bb0 1i10 11X0 ru30 thbE.G 1PX0 11B0 1tz0 Rd0 1zb0 Op0 1zb0 3I10 Lz0 1EN0 FX0 1HB0 FX0 1Kp0 Db0 1Kp0 Db0 1Kp0 FX0 93d0 11z0 GAp0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|65e3","Atlantic/Canary|LMT -01 WET WEST|11.A 10 0 -10|01232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-1UtaW.o XPAW.o 
1lAK0 1a10 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 
WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|54e4","Atlantic/Cape_Verde|LMT -02 -01|1y.4 20 10|01212|-2ldW0 1eEo0 7zX0 1djf0|50e4","Atlantic/Faroe|LMT WET WEST|r.4 0 -10|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2uSnw.U 2Wgow.U 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|49e3","Atlantic/Madeira|LMT FMT -01 +00 +01 WET WEST|17.A 17.A 10 0 -10 0 -10|01232323232323232323232323232323232323232323234323432343234323232323232323232323232323232323232323232565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565656565|-3tomQ.o 18anQ.o aPX0 Sp0 LX0 1vc0 Tc0 1uM0 SM0 1vc0 Tc0 1vc0 SM0 1vc0 6600 1co0 3E00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 3I00 17c0 1cM0 1cM0 3Fc0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 1tA0 1cM0 1dc0 1400 gL0 IM0 s10 U00 dX0 Rc0 pd0 Rc0 gL0 Oo0 pd0 Rc0 gL0 Oo0 pd0 14o0 1cM0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 qIl0 1cM0 1fA0 1cM0 1cM0 1cN0 1cL0 1cN0 1cM0 1cM0 1cM0 1cM0 1cN0 1cL0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 
1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|27e4","Atlantic/South_Georgia|LMT -02|2q.8 20|01|-3eLxx.Q|30","Atlantic/Stanley|LMT SMT -04 -03 -02|3P.o 3P.o 40 30 20|0123232323232323434323232323232323232323232323232323232323232323232323|-3eLw8.A S200 12bA8.A 19X0 1fB0 19X0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1fB0 Cn0 1Cc10 WL0 1qL0 U10 1tz0 2mN0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1tz0 U10 1tz0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1tz0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qL0 WN0 1qN0 U10 1wn0 Rd0 1wn0 U10 1tz0 U10 1tz0 U10 1tz0 U10 1tz0 U10 1wn0 U10 1tz0 U10 1tz0 U10|21e2","Australia/Sydney|LMT AEST AEDT|-a4.Q -a0 -b0|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-32oW4.Q RlC4.Q xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00 17c00 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 14o0 1o00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1tA0 WM0 1tA0 U00 1tA0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 11A0 1o00 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 
1cM0|40e5","Australia/Adelaide|LMT ACST ACST ACDT|-9e.k -90 -9u -au|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323|-32oVe.k ak0e.k H1Bu xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00 17c00 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 Oo0 1zc0 WM0 1qM0 Rc0 1zc0 U00 1tA0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 
1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0|11e5","Australia/Brisbane|LMT AEST AEDT|-ac.8 -a0 -b0|012121212121212121|-32Bmc.8 Ry2c.8 xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00 17c00 LA0 H1A0 Oo0 1zc0 Oo0 1zc0 Oo0|20e5","Australia/Broken_Hill|LMT AEST ACST ACST ACDT|-9p.M -a0 -90 -9u 
-au|0123434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-32oVp.M 3Lzp.M 6wp0 H1Bu xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00 17c00 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 14o0 1o00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1tA0 WM0 1tA0 U00 1tA0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 
1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0|18e3","Australia/Hobart|LMT AEST AEDT|-9N.g -a0 
-b0|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-3109N.g Pk1N.g 1a00 1qM0 Oo0 1zc0 Oo0 TAo0 yM0 1cM0 1cM0 1fA0 1a00 VfA0 1cM0 1o00 Rc0 1wo0 Rc0 1wo0 U00 1wo0 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 11A0 1qM0 WM0 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1wo0 WM0 1tA0 WM0 1tA0 U00 1tA0 U00 1tA0 11A0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 11A0 1o00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 
1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0|21e4","Australia/Darwin|LMT ACST ACST ACDT|-8H.k -90 -9u -au|01232323232|-32oUH.k ajXH.k H1Bu xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00|12e4","Australia/Eucla|LMT +0845 +0945|-8z.s -8J -9J|01212121212121212121|-30nIz.s PkpO.s xc0 10jc0 yM0 1cM0 1cM0 1gSo0 Oo0 l5A0 Oo0 iJA0 G00 zU00 IM0 1qM0 11A0 1o00 11A0|368","Australia/Lord_Howe|LMT AEST +1030 +1130 +11|-aA.k -a0 -au -bu 
-b0|01232323232424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424242424|-32oWA.k 3tzAA.k 1zdu Rb0 1zd0 On0 1zd0 On0 1zd0 On0 1zd0 TXu 1qMu WLu 1tAu WLu 1tAu TXu 1tAu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1zcu Onu 1zcu Onu 1zcu 11zu 1o0u 11zu 1o0u 11zu 1o0u 11zu 1qMu WLu 11Au 1nXu 1qMu 11zu 1o0u 11zu 1o0u 11zu 1qMu WLu 1qMu 11zu 1o0u WLu 1qMu 14nu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 
1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1fzu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu|347","Australia/Lindeman|LMT AEST AEDT|-9T.U -a0 -b0|0121212121212121212121|-32BlT.U Ry1T.U xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00 17c00 LA0 H1A0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0|10","Australia/Melbourne|LMT AEST AEDT|-9D.Q -a0 
-b0|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212|-32oVD.Q RlBD.Q xc0 10jc0 yM0 1cM0 1cM0 1fA0 1a00 17c00 LA0 1C00 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 U00 1qM0 WM0 1qM0 11A0 1tA0 U00 1tA0 U00 1tA0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 11A0 1o00 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 14o0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0|39e5","Australia/Perth|LMT AWST AWDT|-7H.o -80 -90|01212121212121212121|-30nHH.o PkpH.o xc0 10jc0 yM0 1cM0 1cM0 1gSo0 Oo0 l5A0 Oo0 iJA0 G00 zU00 IM0 1qM0 11A0 1o00 11A0|18e5","CET|CET CEST|-10 
-20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 16M0 1gMM0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|","Pacific/Easter|LMT EMT -07 -06 -05|7h.s 7h.s 70 60 
50|0123232323232323232323232323234343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434|-3eLsG.w 1HRc0 1s4IG.w WL0 1zd0 On0 1ip0 11z0 1o10 11z0 1qN0 WL0 1ld0 14n0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 2pA0 11z0 1o10 11z0 1qN0 WL0 1qN0 WL0 1qN0 1cL0 1cN0 11z0 1o10 11z0 1qN0 WL0 1fB0 19X0 1qN0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1ip0 1fz0 1fB0 11z0 1qN0 WL0 1qN0 WL0 1qN0 WL0 1qN0 11z0 1o10 11z0 1o10 11z0 1qN0 WL0 1qN0 17b0 1ip0 11z0 1o10 19X0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 46n0 Ap0 1Nb0 Ap0 1Nb0 Ap0 1zb0 11B0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 
1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0|30e2","CST6CDT|CST CDT CWT CPT|60 50 50 
50|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261s0 1nX0 11B0 1nX0 SgN0 8x30 iw0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","EET|EET EEST|-20 -30|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|hDB0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 
WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|","Europe/Dublin|LMT DMT IST GMT BST IST|p.l p.l -y.D 0 -10 -10|012343434343435353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353535353|-3BHby.D 1ra20 Rc0 1fzy.D 14M0 1fc0 1g00 1co0 1dc0 1co0 1oo0 1400 1dc0 19A0 1io0 1io0 WM0 1o00 14o0 1o00 17c0 1io0 17c0 1fA0 1a00 1lc0 17c0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1cM0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1io0 1qM0 Dc0 g600 14o0 1wo0 17c0 1io0 11A0 1o00 17c0 1fA0 1a00 1fA0 1cM0 1fA0 1a00 17c0 1fA0 1a00 1io0 17c0 1lc0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1a00 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1tA0 IM0 90o0 U00 1tA0 U00 1tA0 U00 1tA0 U00 1tA0 WM0 1qM0 WM0 1qM0 WM0 1tA0 U00 1tA0 U00 1tA0 11z0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 14o0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 
11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|12e5","EST|EST|50|0||","EST5EDT|EST EDT EWT EPT|50 40 40 40|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261t0 1nX0 11B0 1nX0 SgN0 8x40 iv0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 
1zb0|","Etc/GMT-0|GMT|0|0||","Etc/GMT-1|+01|-10|0||","Etc/GMT-10|+10|-a0|0||","Etc/GMT-11|+11|-b0|0||","Etc/GMT-12|+12|-c0|0||","Etc/GMT-13|+13|-d0|0||","Etc/GMT-14|+14|-e0|0||","Etc/GMT-2|+02|-20|0||","Etc/GMT-3|+03|-30|0||","Etc/GMT-4|+04|-40|0||","Etc/GMT-5|+05|-50|0||","Etc/GMT-6|+06|-60|0||","Etc/GMT-7|+07|-70|0||","Etc/GMT-8|+08|-80|0||","Etc/GMT-9|+09|-90|0||","Etc/GMT+1|-01|10|0||","Etc/GMT+10|-10|a0|0||","Etc/GMT+11|-11|b0|0||","Etc/GMT+12|-12|c0|0||","Etc/GMT+2|-02|20|0||","Etc/GMT+3|-03|30|0||","Etc/GMT+4|-04|40|0||","Etc/GMT+5|-05|50|0||","Etc/GMT+6|-06|60|0||","Etc/GMT+7|-07|70|0||","Etc/GMT+8|-08|80|0||","Etc/GMT+9|-09|90|0||","Etc/UTC|UTC|0|0||","Europe/Brussels|LMT BMT WET CET CEST WEST|-h.u -h.u 0 -10 -20 -10|012343434325252525252525252525252525252525252525252525434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-3D8Mh.u u1Ah.u SO00 3zX0 11c0 1iO0 11A0 1o00 11A0 my0 Ic0 1qM0 Rc0 1EM0 UM0 1u00 10o0 1io0 1io0 17c0 1a00 1fA0 1cM0 1cM0 1io0 17c0 1fA0 1a00 1io0 1a30 1io0 17c0 1fA0 1a00 1io0 17c0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 y00 5Wn0 WM0 1fA0 1cM0 16M0 1iM0 16M0 1C00 Uo0 1eeo0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 
11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|21e5","Europe/Andorra|LMT WET CET CEST|-6.4 0 -10 -20|0123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-2M0M6.4 1Pnc6.4 1xIN0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|79e3","Europe/Astrakhan|LMT +03 +04 +05|-3c.c -30 -40 -50|012323232323232323212121212121212121212121212121212121212121212|-1Pcrc.c eUMc.c 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 2pB0 1cM0 1fA0 1cM0 3Co0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3rd0|10e5","Europe/Athens|LMT AMT EET EEST CEST CET|-1y.Q -1y.Q -20 -30 -20 
-10|0123234545232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-30SNy.Q OMM1 CNbx.Q mn0 kU10 9b0 3Es0 Xa0 1fb0 1dd0 k3X0 Nz0 SCp0 1vc0 SO0 1cM0 1a00 1ao0 1fc0 1a10 1fG0 1cg0 1dX0 1bX0 1cQ0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|35e5","Europe/London|LMT GMT BST BDST|1.f 0 -10 
-20|01212121212121212121212121212121212121212121212121232323232321212321212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-4VgnW.J 2KHdW.J Rc0 1fA0 14M0 1fc0 1g00 1co0 1dc0 1co0 1oo0 1400 1dc0 19A0 1io0 1io0 WM0 1o00 14o0 1o00 17c0 1io0 17c0 1fA0 1a00 1lc0 17c0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1cM0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1io0 1qM0 Dc0 2Rz0 Dc0 1zc0 Oo0 1zc0 Rc0 1wo0 17c0 1iM0 FA0 xB0 1fA0 1a00 14o0 bb0 LA0 xB0 Rc0 1wo0 11A0 1o00 17c0 1fA0 1a00 1fA0 1cM0 1fA0 1a00 17c0 1fA0 1a00 1io0 17c0 1lc0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1a00 1qM0 WM0 1qM0 11A0 1o00 WM0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1tA0 IM0 90o0 U00 1tA0 U00 1tA0 U00 1tA0 U00 1tA0 WM0 1qM0 WM0 1qM0 WM0 1tA0 U00 1tA0 U00 1tA0 11z0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 14o0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|10e6","Europe/Belgrade|LMT CET CEST|-1m -10 
-20|012121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3topm 2juLm 3IP0 WM0 1fA0 1cM0 1cM0 1rc0 Qo0 1vmo0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|12e5","Europe/Prague|LMT PMT CET CEST GMT|-V.I -V.I -10 -20 
0|0123232323232323232423232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-4QbAV.I 1FDc0 XPaV.I 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 1cM0 1qM0 11c0 mp0 xA0 mn0 17c0 1io0 17c0 1fc0 1ao0 1bNc0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 
1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|13e5","Europe/Bucharest|LMT BMT EET EEST|-1I.o -1I.o -20 
-30|01232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3awpI.o 1AU00 20LI.o RA0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1Axc0 On0 1fA0 1a10 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cK0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cL0 1cN0 1cL0 1fB0 1nX0 11E0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|19e5","Europe/Budapest|LMT CET CEST|-1g.k -10 
-20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-3cK1g.k 124Lg.k 11d0 1iO0 11A0 1o00 11A0 1oo0 11c0 1lc0 17c0 O1V0 3Nf0 WM0 1fA0 1cM0 1cM0 1oJ0 1dd0 1020 1fX0 1cp0 1cM0 1cM0 1cM0 1fA0 1a00 bhy0 Rb0 1wr0 Rc0 1C00 LA0 1C00 LA0 SNW0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cO0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|17e5","Europe/Zurich|LMT BMT CET CEST|-y.8 -t.K -10 
-20|0123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-4HyMy.8 1Dw04.m 1SfAt.K 11A0 1o00 11A0 1xG10 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 
1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|38e4","Europe/Chisinau|LMT CMT BMT EET EEST CEST CET MSK MSD|-1T.k -1T -1I.o -20 -30 -20 -10 -30 
-40|0123434343434343434345656578787878787878787878434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343434343|-3D8NT.k 1wNA0.k wGMa.A 20LI.o RA0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 27A0 2en0 39g0 WM0 1fA0 1cM0 V90 1t7z0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 gL0 WO0 1cM0 1cM0 1cK0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1nX0 11D0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|67e4","Europe/Gibraltar|LMT GMT BST BDST CET CEST|l.o 0 -10 -20 -10 
-20|0121212121212121212121212121212121212121212121212123232323232121232121212121212121212145454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-3BHbC.A 1ra1C.A Rc0 1fA0 14M0 1fc0 1g00 1co0 1dc0 1co0 1oo0 1400 1dc0 19A0 1io0 1io0 WM0 1o00 14o0 1o00 17c0 1io0 17c0 1fA0 1a00 1lc0 17c0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1cM0 1io0 17c0 1fA0 1a00 1io0 17c0 1io0 17c0 1fA0 1a00 1io0 1qM0 Dc0 2Rz0 Dc0 1zc0 Oo0 1zc0 Rc0 1wo0 17c0 1iM0 FA0 xB0 1fA0 1a00 14o0 bb0 LA0 xB0 Rc0 1wo0 11A0 1o00 17c0 1fA0 1a00 1fA0 1cM0 1fA0 1a00 17c0 1fA0 1a00 1io0 17c0 1lc0 17c0 1fA0 10Jz0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|30e3","Europe/Helsinki|LMT HMT EET EEST|-1D.N -1D.N -20 
-30|01232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3H0ND.N 1Iu00 OULD.N 1dA0 1xGq0 1cM0 1cM0 1cM0 1cN0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|12e5","Europe/Kaliningrad|LMT CET CEST EET EEST MSK MSD +03|-1m -10 -20 -20 -30 -30 -40 -30|012121212121212343565656565656565654343434343434343434343434343434343434343434373|-36Rdm UbXm 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 390 7A0 1en0 12N0 1pbb0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|44e4","Europe/Kiev|LMT KMT EET MSK CEST CET MSD EEST|-22.4 -22.4 -20 -30 -20 -10 -40 
-30|01234545363636363636363636367272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272727272|-3D8O2.4 1LUM0 eUo2.4 rnz0 2Hg0 WM0 1fA0 da0 1v4m0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 Db0 3220 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o10 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 
1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|34e5","Europe/Kirov|LMT +03 +04 +05 MSD MSK MSK|-3i.M -30 -40 -50 -40 -30 -40|0123232323232323232454524545454545454545454545454545454545454565|-22WM0 qH90 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 2pz0 1cN0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|48e4","Europe/Lisbon|LMT WET WEST WEMT CET CEST|A.J 0 -10 -20 -10 
-20|01212121212121212121212121212121212121212121232123212321232121212121212121212121212121212121212121214121212121212121212121212121212124545454212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2le00 aPX0 Sp0 LX0 1vc0 Tc0 1uM0 SM0 1vc0 Tc0 1vc0 SM0 1vc0 6600 1co0 3E00 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 3I00 17c0 1cM0 1cM0 3Fc0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Dc0 1tA0 1cM0 1dc0 1400 gL0 IM0 s10 U00 dX0 Rc0 pd0 Rc0 gL0 Oo0 pd0 Rc0 gL0 Oo0 pd0 14o0 1cM0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 pvy0 1cM0 1cM0 1fA0 1cM0 1cM0 1cN0 1cL0 1cN0 1cM0 1cM0 1cM0 1cM0 1cN0 1cL0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|27e5","Europe/Madrid|LMT WET WEST WEMT CET CEST|e.I 0 -10 -20 -10 
-20|0121212121212121212321454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454|-2M0M0 G5z0 19B0 1cL0 1dd0 b1z0 18p0 3HX0 17d0 1fz0 1a10 1io0 1a00 1in0 17d0 iIn0 Hd0 1cL0 bb0 1200 2s20 14n0 5aL0 Mp0 1vz0 17d0 1in0 17d0 1in0 17d0 1in0 17d0 6hX0 11B0 XHX0 1a10 1fz0 1a10 19X0 1cN0 1fz0 1a10 1fC0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 
1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|62e5","Europe/Malta|LMT CET CEST|-W.4 -10 
-20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-35rcW.4 SXzW.4 Lz0 1cN0 1db0 1410 1on0 Wp0 1qL0 17d0 1cL0 M3B0 5M20 WM0 1fA0 1co0 17c0 1iM0 16m0 1de0 1lc0 14m0 1lc0 WO0 1qM0 GTW0 On0 1C10 LA0 1C00 LA0 1EM0 LA0 1C00 LA0 1zc0 Oo0 1C00 Oo0 1co0 1cM0 1lA0 Xc0 1qq0 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 11z0 1iN0 19z0 1fB0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|42e4","Europe/Minsk|LMT MMT EET MSK CEST CET MSD EEST +03|-1O.g -1O -20 -30 -20 -10 -40 -30 -30|012345454363636363636363636372727272727272727272727272727272727272728|-3D8NO.g 1LUM0.g eUnO qNX0 3gQ0 WM0 1fA0 1cM0 Al0 1tsn0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 3Fc0 1cN0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0|19e5","Europe/Paris|LMT PMT WET WEST CEST CET WEMT|-9.l -9.l 0 -10 -20 -10 
-20|01232323232323232323232323232323232323232323232323234545463654545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545|-3bQ09.l MDA0 cNb9.l HA0 19A0 1iM0 11c0 1oo0 Wo0 1rc0 QM0 1EM0 UM0 1u00 10o0 1io0 1wo0 Rc0 1a00 1fA0 1cM0 1cM0 1io0 17c0 1fA0 1a00 1io0 1a00 1io0 17c0 1fA0 1a00 1io0 17c0 1cM0 1cM0 1a00 1io0 1cM0 1cM0 1a00 1fA0 1io0 17c0 1cM0 1cM0 1a00 1fA0 1io0 1qM0 Df0 Ik0 5M30 WM0 1fA0 1cM0 Vx0 hB0 1aq0 16M0 1ekn0 1cL0 1fC0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|11e6","Europe/Moscow|LMT MMT MMT MST MDST MSD MSK +05 EET EEST MSK|-2u.h -2u.h -2v.j -3v.j -4v.j -40 -30 -50 -20 -30 -40|01232434565756865656565656565656565698656565656565656565656565656565656565656a6|-3D8Ou.h 1sQM0 2pyW.W 1bA0 11X0 GN0 1Hb0 c4v.j ik0 3DA0 dz0 15A0 c10 2q10 iM10 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0|16e6","Europe/Riga|LMT RMT LST EET MSK CEST CET MSD EEST|-1A.y 
-1A.y -2A.y -20 -30 -20 -10 -40 -30|0121213456565647474747474747474838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383838383|-3D8NA.y 1xde0 11A0 1iM0 ko0 gWm0 yDXA.y 2bX0 3fE0 WM0 1fA0 1cM0 1cM0 4m0 1sLy0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cN0 1o00 11A0 1o00 11A0 1qM0 3oo0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 
11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|64e4","Europe/Rome|LMT RMT CET CEST|-N.U -N.U -10 
-20|012323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-4aU0N.U 15snN.U T000 Lz0 1cN0 1db0 1410 1on0 Wp0 1qL0 17d0 1cL0 M3B0 5M20 WM0 1fA0 1cM0 16M0 1iM0 16m0 1de0 1lc0 14m0 1lc0 WO0 1qM0 GTW0 On0 1C10 LA0 1C00 LA0 1EM0 LA0 1C00 LA0 1zc0 Oo0 1C00 Oo0 1C00 LA0 1zc0 Oo0 1C00 LA0 1C00 LA0 1zc0 Oo0 1C00 Oo0 1zc0 Oo0 1fC0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|39e5","Europe/Samara|LMT +03 +04 +05|-3k.k -30 -40 -50|0123232323232323232121232323232323232323232323232323232323212|-22WM0 qH90 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 2pB0 1cM0 1fA0 2y10 14m0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 2sp0 WM0|12e5","Europe/Saratov|LMT +03 +04 +05|-34.i -30 -40 -50|012323232323232321212121212121212121212121212121212121212121212|-22WM0 qH90 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 2pB0 1cM0 1cM0 1cM0 1fA0 1cM0 3Co0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 
1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 5810|","Europe/Simferopol|LMT SMT EET MSK CEST CET MSD EEST MSK|-2g.o -2g -20 -30 -20 -10 -40 -30 -40|0123454543636363636363636363272727636363727272727272727272727272727272727283|-3D8Og.o 1LUM0.o eUog rEn0 2qs0 WM0 1fA0 1cM0 3V0 1u0L0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1Q00 4eN0 1cM0 1cM0 1cM0 1cM0 dV0 WO0 1cM0 1cM0 1fy0 1o30 11B0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11z0 1nW0|33e4","Europe/Sofia|LMT IMT EET CET CEST EEST|-1x.g -1U.U -20 -10 -20 -30|0123434325252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252525252|-3D8Nx.g AiLA.k 1UFeU.U WM0 1fA0 1cM0 1cM0 1cN0 1mKH0 1dd0 1fb0 1ap0 1fb0 1a20 1fy0 1a30 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cK0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 1nX0 11E0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|12e5","Europe/Tallinn|LMT TMT CET CEST EET MSK MSD EEST|-1D -1D -10 -20 -20 -30 -40 
-30|0123214532323565656565656565657474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474747474|-3D8ND 1wI00 teD 11A0 1Ta0 4rXl KSLD 2FX0 2Jg0 WM0 1fA0 1cM0 18J0 1sTX0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o10 11A0 1qM0 5QM0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|41e4","Europe/Tirane|LMT CET CEST|-1j.k -10 
-20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-2glBj.k 14pcj.k 5LC0 WM0 4M0 1fCK0 10n0 1op0 11z0 1pd0 11z0 1qN0 WL0 1qp0 Xb0 1qp0 Xb0 1qp0 11z0 1lB0 11z0 1qN0 11z0 1iN0 16n0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 
1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|42e4","Europe/Ulyanovsk|LMT +03 +04 +05 +02|-3d.A -30 -40 -50 -20|01232323232323232321214121212121212121212121212121212121212121212|-22WM0 qH90 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 2pB0 1cM0 1fA0 2pB0 IM0 rX0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 3rd0|13e5","Europe/Vienna|LMT CET CEST|-15.l -10 
-20|01212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121212121|-36Rd5.l UbX5.l 11d0 1iO0 11A0 1o00 11A0 3KM0 14o0 LA00 6i00 WM0 1fA0 1cM0 1cM0 1cM0 400 2qM0 1ao0 1co0 1cM0 1io0 17c0 1gHa0 19X0 1cP0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 
1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|18e5","Europe/Vilnius|LMT WMT KMT CET EET MSK CEST MSD EEST|-1F.g -1o -1z.A -10 -20 -30 -20 -40 
-30|0123435636365757575757575757584848484848484848463648484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484848484|-3D8NF.g 1u5Ah.g 6ILM.o 1Ooz.A zz0 Mfd0 29W0 3is0 WM0 1fA0 1cM0 LV0 1tgL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11B0 1o00 11A0 1qM0 8io0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 
1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|54e4","Europe/Volgograd|LMT +03 +04 +05 MSD MSK MSK|-2V.E -30 -40 -50 -40 -30 -40|012323232323232324545452454545454545454545454545454545454545456525|-21IqV.E psLV.E 23CL0 1db0 1cN0 1db0 1cN0 1db0 1dd0 1cO0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1cM0 1cM0 1fA0 1cM0 2pz0 1cN0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 8Hz0 9Jd0 5gn0|10e5","Europe/Warsaw|LMT WMT CET CEST EET EEST|-1o -1o -10 -20 -20 
-30|0123232345423232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232|-3D8No 1qDA0 1LXo 11d0 1iO0 11A0 1o00 11A0 1on0 11A0 6zy0 HWP0 5IM0 WM0 1fA0 1cM0 1dz0 1mL0 1en0 15B0 1aq0 1nA0 11A0 1io0 17c0 1fA0 1a00 iDX0 LA0 1cM0 1cM0 1C00 Oo0 1cM0 1cM0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1C00 LA0 uso0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cN0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 
WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|17e5","HST|HST|a0|0||","Indian/Chagos|LMT +05 +06|-4N.E -50 -60|012|-2xosN.E 3AGLN.E|30e2","Indian/Maldives|LMT MMT +05|-4S -4S -50|012|-3D8QS 3eLA0|35e4","Indian/Mauritius|LMT +04 +05|-3O -40 -50|012121|-2xorO 34unO 14L0 12kr0 11z0|15e4","Pacific/Kwajalein|LMT +11 +10 +09 -12 +12|-b9.k -b0 -a0 -90 c0 -c0|0123145|-2M0X9.k 1rDA9.k akp0 6Up0 12ry0 Wan0|14e3","MET|MET MEST|-10 
-20|01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-2aFe0 11d0 1iO0 11A0 1o00 11A0 Qrc0 6i00 WM0 1fA0 1cM0 1cM0 1cM0 16M0 1gMM0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 
1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|","MST|MST|70|0||","MST7MDT|MST MDT MWT MPT|70 60 60 
60|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261r0 1nX0 11B0 1nX0 SgN0 8x20 ix0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","Pacific/Chatham|LMT +1215 +1245 +1345|-cd.M -cf -cJ -dJ|0123232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323232323|-46jMd.M 37RbW.M 1adef IM0 1C00 Rc0 1zc0 Oo0 
1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1qM0 14o0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1lc0 14o0 1lc0 14o0 1lc0 17c0 1io0 17c0 1io0 17c0 1io0 17c0 1io0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 
1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1io0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00|600","Pacific/Apia|LMT LMT -1130 -11 -10 +14 +13|-cx.4 bq.U bu b0 a0 -e0 -d0|012343456565656565656565656|-38Fox.4 J1A0 1yW03.4 2rRbu 1ff0 1a00 CI0 AQ0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0|37e3","Pacific/Bougainville|LMT PMMT +10 +09 +11|-am.g -9M.w -a0 -90 -b0|012324|-3D8Wm.g AvAx.I 1TCLM.w 7CN0 2MQp0|18e4","Pacific/Efate|LMT +11 +12|-bd.g -b0 -c0|012121212121212121212121|-2l9nd.g 2uNXd.g Dc0 n610 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 Lz0 1Nd0 An0|66e3","Pacific/Enderbury|-00 -12 -11 +13|0 c0 b0 -d0|0123|-1iIo0 1GsA0 B7X0|1","Pacific/Fakaofo|LMT -11 +13|bo.U b0 -d0|012|-2M0Az.4 4ufXz.4|483","Pacific/Fiji|LMT +12 +13|-bT.I -c0 -d0|012121212121212121212121212121|-2bUzT.I 3m8NT.I LA0 1EM0 IM0 nJc0 LA0 1o00 Rc0 1wo0 Ao0 1Nc0 Ao0 1Q00 xz0 1SN0 uM0 1SM0 uM0 1VA0 s00 1VA0 s00 1VA0 s00 20o0 pc0 2hc0 bc0|88e4","Pacific/Tarawa|LMT +12|-bw.4 -c0|01|-2M0Xw.4|29e3","Pacific/Galapagos|LMT -05 -06|5W.o 50 60|01212|-1yVS1.A 2dTz1.A gNd0 rz0|25e3","Pacific/Gambier|LMT -09|8X.M 90|01|-2jof0.c|125","Pacific/Guadalcanal|LMT +11|-aD.M -b0|01|-2joyD.M|11e4","Pacific/Guam|LMT LMT GST +09 GDT ChST|el -9D -a0 -90 -b0 -a0|0123242424242424242425|-54m9D 2glc0 1DFbD 6pB0 AhB0 3QL0 g2p0 3p91 WOX rX0 1zd0 Rb0 1wp0 Rb0 5xd0 rX0 5sN0 zb1 1C0X On0 ULb0|17e4","Pacific/Honolulu|LMT HST HDT HWT HPT HST|av.q au 9u 9u 9u a0|01213415|-3061s.y 1uMdW.y 8x0 lef0 8wWu iAu 46p0|37e4","Pacific/Kiritimati|LMT -1040 -10 +14|at.k aE a0 -e0|0123|-2M0Bu.E 3bIMa.E B7Xk|51e2","Pacific/Kosrae|LMT LMT +11 +09 +10 +12|d8.4 -aP.U -b0 -90 -a0 -c0|0123243252|-54maP.U 2glc0 xsnP.U axC0 HBy0 akp0 axd0 WOK0 1bdz0|66e2","Pacific/Marquesas|LMT -0930|9i 9u|01|-2joeG|86e2","Pacific/Pago_Pago|LMT LMT SST|-cB.c bm.M b0|012|-38FoB.c J1A0|37e2","Pacific/Nauru|LMT +1130 +09 +12|-b7.E -bu -90 -c0|01213|-1Xdn7.E QCnB.E 7mqu 
1lnbu|10e3","Pacific/Niue|LMT -1120 -11|bj.E bk b0|012|-FScE.k suo0.k|12e2","Pacific/Norfolk|LMT +1112 +1130 +1230 +11 +12|-bb.Q -bc -bu -cu -b0 -c0|0123245454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545454545|-2M0Xb.Q 21ILX.Q W01G Oo0 1COo0 9Jcu 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 
1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0|25e4","Pacific/Noumea|LMT +11 +12|-b5.M -b0 -c0|01212121|-2l9n5.M 2EqM5.M xX0 1PB0 yn0 HeP0 Ao0|98e3","Pacific/Palau|LMT LMT +09|f2.4 -8V.U -90|012|-54m8V.U 2glc0|21e3","Pacific/Pitcairn|LMT -0830 -08|8E.k 8u 80|012|-2M0Dj.E 3UVXN.E|56","Pacific/Rarotonga|LMT LMT -1030 -0930 -10|-dk.U aD.4 au 9u a0|01234343434343434343434343434|-2Otpk.U 28zc0 13tbO.U IL0 1zcu Onu 1zcu Onu 1zcu Rbu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Onu 1zcu Rbu 1zcu Onu 1zcu Onu 1zcu Onu|13e3","Pacific/Tahiti|LMT -10|9W.g a0|01|-2joe1.I|18e4","Pacific/Tongatapu|LMT +1220 +13 +14|-cj.c -ck -d0 -e0|01232323232|-XbMj.c BgLX.c 1yndk 15A0 1wo0 xz0 1Q10 xz0 zWN0 s00|75e3","PST8PDT|PST PDT PWT PPT|80 70 70 
70|010102301010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|-261q0 1nX0 11B0 1nX0 SgN0 8x10 iy0 QwN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1cN0 1cL0 1cN0 1cL0 s10 1Vz0 LB0 1BX0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 1cN0 1fz0 1a10 1fz0 1cN0 1cL0 1cN0 1cL0 1cN0 1cL0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0|","WET|WET WEST|0 -10|010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010|hDB0 1a00 1fA0 1cM0 1cM0 1cM0 1fA0 1a00 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 
1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 
1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00|"],links:["Africa/Abidjan|Africa/Accra","Africa/Abidjan|Africa/Bamako","Africa/Abidjan|Africa/Banjul","Africa/Abidjan|Africa/Conakry","Africa/Abidjan|Africa/Dakar","Africa/Abidjan|Africa/Freetown","Africa/Abidjan|Africa/Lome","Africa/Abidjan|Africa/Nouakchott","Africa/Abidjan|Africa/Ouagadougou","Africa/Abidjan|Africa/Timbuktu","Africa/Abidjan|Atlantic/Reykjavik","Africa/Abidjan|Atlantic/St_Helena","Africa/Abidjan|Iceland","Africa/Cairo|Egypt","Africa/Johannesburg|Africa/Maseru","Africa/Johannesburg|Africa/Mbabane","Africa/Lagos|Africa/Bangui","Africa/Lagos|Africa/Brazzaville","Africa/Lagos|Africa/Douala","Africa/Lagos|Africa/Kinshasa","Africa/Lagos|Africa/Libreville","Africa/Lagos|Africa/Luanda","Africa/Lagos|Africa/Malabo","Africa/Lagos|Africa/Niamey","Africa/Lagos|Africa/Porto-Novo","Africa/Maputo|Africa/Blantyre","Africa/Maputo|Africa/Bujumbura","Africa/Maputo|Africa/Gaborone","Africa/Maputo|Africa/Harare","Africa/Maputo|Africa/Kigali","Africa/Maputo|Africa/Lubumbashi","Africa/Maputo|Africa/Lusaka","Africa/Nairobi|Africa/Addis_Ababa","Africa/Nairobi|Africa/Asmara","Africa/Nairobi|Africa/Asmera","Africa/Nairobi|Africa/Dar_es_Salaam","Africa/Nairobi|Africa/Djibouti","Africa/Nairobi|Africa/Kampala","Africa/Nairobi|Africa/Mogadishu","Africa/Nairobi|Indian/Antananarivo","Africa/Nairobi|Indian/Comoro","Africa/Nairobi|Indian/Mayotte","Africa/Tripoli|Libya","America/Adak|America/Atka","America/Adak|US/Aleutian","America/Anchorage|US/Alaska","America/Argentina/Buenos_Aires|America/Buenos_Aires","America/Argentina/Catamarca|America/Argentina/ComodRivadavia","America/Argentina/Catamarca|America/Catamarca","America/Argentina/Cordoba|America/Cordoba","America/Argentina/Cordoba|America/Rosario","America/Argentina/Jujuy|America/Jujuy","America/Argentina/Mendoza|America/Mendoza","America/Chicago|US/Central","America/Denver|America/Shiprock","America/Denver|Navajo","America/Denver|US/Mountain","America/Detroit|US/Michigan","America/Edmonton|America/Yellowknife","America/Edmonton|Canada/Mountain","America/Fort_Wayne|America/Indiana/Indianapolis","America/Fort_Wayne|America/Indianapolis","America/Fort_Wayne|US/East-Indiana","America/Godthab|Am
erica/Nuuk","America/Halifax|Canada/Atlantic","America/Havana|Cuba","America/Indiana/Knox|America/Knox_IN","America/Indiana/Knox|US/Indiana-Starke","America/Iqaluit|America/Pangnirtung","America/Jamaica|Jamaica","America/Kentucky/Louisville|America/Louisville","America/Los_Angeles|US/Pacific","America/Manaus|Brazil/West","America/Mazatlan|Mexico/BajaSur","America/Mexico_City|Mexico/General","America/New_York|US/Eastern","America/Noronha|Brazil/DeNoronha","America/Panama|America/Atikokan","America/Panama|America/Cayman","America/Panama|America/Coral_Harbour","America/Phoenix|America/Creston","America/Phoenix|US/Arizona","America/Puerto_Rico|America/Anguilla","America/Puerto_Rico|America/Antigua","America/Puerto_Rico|America/Aruba","America/Puerto_Rico|America/Blanc-Sablon","America/Puerto_Rico|America/Curacao","America/Puerto_Rico|America/Dominica","America/Puerto_Rico|America/Grenada","America/Puerto_Rico|America/Guadeloupe","America/Puerto_Rico|America/Kralendijk","America/Puerto_Rico|America/Lower_Princes","America/Puerto_Rico|America/Marigot","America/Puerto_Rico|America/Montserrat","America/Puerto_Rico|America/Port_of_Spain","America/Puerto_Rico|America/St_Barthelemy","America/Puerto_Rico|America/St_Kitts","America/Puerto_Rico|America/St_Lucia","America/Puerto_Rico|America/St_Thomas","America/Puerto_Rico|America/St_Vincent","America/Puerto_Rico|America/Tortola","America/Puerto_Rico|America/Virgin","America/Regina|Canada/Saskatchewan","America/Rio_Branco|America/Porto_Acre","America/Rio_Branco|Brazil/Acre","America/Santiago|Chile/Continental","America/Sao_Paulo|Brazil/East","America/St_Johns|Canada/Newfoundland","America/Tijuana|America/Ensenada","America/Tijuana|America/Santa_Isabel","America/Tijuana|Mexico/BajaNorte","America/Toronto|America/Montreal","America/Toronto|America/Nassau","America/Toronto|America/Nipigon","America/Toronto|America/Thunder_Bay","America/Toronto|Canada/Eastern","America/Vancouver|Canada/Pacific","America/Whitehorse|Canada/Yukon","America/Winnipeg|America/Rainy_River","America/Winnipeg|Canada/Central","Asia/Ashgabat|Asia/Ashkhabad","Asia/Bangkok|Asia/Phnom_Penh","Asia/Bangkok|Asia/Vientiane","Asia/Bangkok|Indian/Christmas","Asia/Brunei|Asia/Kuching","Asia/Dhaka|Asia/Dacca","Asia/Dubai|Asia/Muscat","Asia/Dubai|Indian/Mahe","Asia/Dubai|Indian/Reunion","Asia/Ho_Chi_Minh|Asia/Saigon","Asia/Hong_Kong|Hongkong","Asia/Jerusalem|Asia/Tel_Aviv","Asia/Jerusalem|Israel","Asia/Kathmandu|Asia/Katmandu","Asia/Kolkata|Asia/Calcutta","Asia/Kuala_Lumpur|Asia/Singapore","Asia/Kuala_Lumpur|Singapore","Asia/Macau|Asia/Macao","Asia/Makassar|Asia/Ujung_Pandang","Asia/Nicosia|Europe/Nicosia","Asia/Qatar|Asia/Bahrain","Asia/Rangoon|Asia/Yangon","Asia/Rangoon|Indian/Cocos","Asia/Riyadh|Antarctica/Syowa","Asia/Riyadh|Asia/Aden","Asia/Riyadh|Asia/Kuwait","Asia/Seoul|ROK","Asia/Shanghai|Asia/Chongqing","Asia/Shanghai|Asia/Chungking","Asia/Shanghai|Asia/Harbin","Asia/Shanghai|PRC","Asia/Taipei|ROC","Asia/Tehran|Iran","Asia/Thimphu|Asia/Thimbu","Asia/Tokyo|Japan","Asia/Ulaanbaatar|Asia/Ulan_Bator","Asia/Urumqi|Asia/Kashgar","Atlantic/Faroe|Atlantic/Faeroe","Australia/Adelaide|Australia/South","Australia/Brisbane|Australia/Queensland","Australia/Broken_Hill|Australia/Yancowinna","Australia/Darwin|Australia/North","Australia/Hobart|Australia/Currie","Australia/Hobart|Australia/Tasmania","Australia/Lord_Howe|Australia/LHI","Australia/Melbourne|Australia/Victoria","Australia/Perth|Australia/West","Australia/Sydney|Australia/ACT","Australia/Sydney|Australia/Canberra","Australia/Sydney|Australia/
NSW","Etc/GMT-0|Etc/GMT","Etc/GMT-0|Etc/GMT+0","Etc/GMT-0|Etc/GMT0","Etc/GMT-0|Etc/Greenwich","Etc/GMT-0|GMT","Etc/GMT-0|GMT+0","Etc/GMT-0|GMT-0","Etc/GMT-0|GMT0","Etc/GMT-0|Greenwich","Etc/UTC|Etc/UCT","Etc/UTC|Etc/Universal","Etc/UTC|Etc/Zulu","Etc/UTC|UCT","Etc/UTC|UTC","Etc/UTC|Universal","Etc/UTC|Zulu","Europe/Belgrade|Europe/Ljubljana","Europe/Belgrade|Europe/Podgorica","Europe/Belgrade|Europe/Sarajevo","Europe/Belgrade|Europe/Skopje","Europe/Belgrade|Europe/Zagreb","Europe/Berlin|Arctic/Longyearbyen","Europe/Berlin|Atlantic/Jan_Mayen","Europe/Berlin|Europe/Copenhagen","Europe/Berlin|Europe/Oslo","Europe/Berlin|Europe/Stockholm","Europe/Brussels|Europe/Amsterdam","Europe/Brussels|Europe/Luxembourg","Europe/Chisinau|Europe/Tiraspol","Europe/Dublin|Eire","Europe/Helsinki|Europe/Mariehamn","Europe/Istanbul|Asia/Istanbul","Europe/Istanbul|Turkey","Europe/Kiev|Europe/Kyiv","Europe/Kiev|Europe/Uzhgorod","Europe/Kiev|Europe/Zaporozhye","Europe/Lisbon|Portugal","Europe/London|Europe/Belfast","Europe/London|Europe/Guernsey","Europe/London|Europe/Isle_of_Man","Europe/London|Europe/Jersey","Europe/London|GB","Europe/London|GB-Eire","Europe/Moscow|W-SU","Europe/Paris|Europe/Monaco","Europe/Prague|Europe/Bratislava","Europe/Rome|Europe/San_Marino","Europe/Rome|Europe/Vatican","Europe/Warsaw|Poland","Europe/Zurich|Europe/Busingen","Europe/Zurich|Europe/Vaduz","Indian/Maldives|Indian/Kerguelen","Pacific/Auckland|Antarctica/McMurdo","Pacific/Auckland|Antarctica/South_Pole","Pacific/Auckland|NZ","Pacific/Chatham|NZ-CHAT","Pacific/Easter|Chile/EasterIsland","Pacific/Enderbury|Pacific/Kanton","Pacific/Guadalcanal|Pacific/Pohnpei","Pacific/Guadalcanal|Pacific/Ponape","Pacific/Guam|Pacific/Saipan","Pacific/Honolulu|Pacific/Johnston","Pacific/Honolulu|US/Hawaii","Pacific/Kwajalein|Kwajalein","Pacific/Pago_Pago|Pacific/Midway","Pacific/Pago_Pago|Pacific/Samoa","Pacific/Pago_Pago|US/Samoa","Pacific/Port_Moresby|Antarctica/DumontDUrville","Pacific/Port_Moresby|Pacific/Chuuk","Pacific/Port_Moresby|Pacific/Truk","Pacific/Port_Moresby|Pacific/Yap","Pacific/Tarawa|Pacific/Funafuti","Pacific/Tarawa|Pacific/Majuro","Pacific/Tarawa|Pacific/Wake","Pacific/Tarawa|Pacific/Wallis"],countries:["AD|Europe/Andorra","AE|Asia/Dubai","AF|Asia/Kabul","AG|America/Puerto_Rico America/Antigua","AI|America/Puerto_Rico America/Anguilla","AL|Europe/Tirane","AM|Asia/Yerevan","AO|Africa/Lagos Africa/Luanda","AQ|Antarctica/Casey Antarctica/Davis Antarctica/Mawson Antarctica/Palmer Antarctica/Rothera Antarctica/Troll Antarctica/Vostok Pacific/Auckland Pacific/Port_Moresby Asia/Riyadh Antarctica/McMurdo Antarctica/DumontDUrville Antarctica/Syowa","AR|America/Argentina/Buenos_Aires America/Argentina/Cordoba America/Argentina/Salta America/Argentina/Jujuy America/Argentina/Tucuman America/Argentina/Catamarca America/Argentina/La_Rioja America/Argentina/San_Juan America/Argentina/Mendoza America/Argentina/San_Luis America/Argentina/Rio_Gallegos America/Argentina/Ushuaia","AS|Pacific/Pago_Pago","AT|Europe/Vienna","AU|Australia/Lord_Howe Antarctica/Macquarie Australia/Hobart Australia/Melbourne Australia/Sydney Australia/Broken_Hill Australia/Brisbane Australia/Lindeman Australia/Adelaide Australia/Darwin Australia/Perth Australia/Eucla","AW|America/Puerto_Rico America/Aruba","AX|Europe/Helsinki Europe/Mariehamn","AZ|Asia/Baku","BA|Europe/Belgrade Europe/Sarajevo","BB|America/Barbados","BD|Asia/Dhaka","BE|Europe/Brussels","BF|Africa/Abidjan Africa/Ouagadougou","BG|Europe/Sofia","BH|Asia/Qatar Asia/Bahrain","BI|Africa/Maputo 
Africa/Bujumbura","BJ|Africa/Lagos Africa/Porto-Novo","BL|America/Puerto_Rico America/St_Barthelemy","BM|Atlantic/Bermuda","BN|Asia/Kuching Asia/Brunei","BO|America/La_Paz","BQ|America/Puerto_Rico America/Kralendijk","BR|America/Noronha America/Belem America/Fortaleza America/Recife America/Araguaina America/Maceio America/Bahia America/Sao_Paulo America/Campo_Grande America/Cuiaba America/Santarem America/Porto_Velho America/Boa_Vista America/Manaus America/Eirunepe America/Rio_Branco","BS|America/Toronto America/Nassau","BT|Asia/Thimphu","BW|Africa/Maputo Africa/Gaborone","BY|Europe/Minsk","BZ|America/Belize","CA|America/St_Johns America/Halifax America/Glace_Bay America/Moncton America/Goose_Bay America/Toronto America/Iqaluit America/Winnipeg America/Resolute America/Rankin_Inlet America/Regina America/Swift_Current America/Edmonton America/Cambridge_Bay America/Inuvik America/Dawson_Creek America/Fort_Nelson America/Whitehorse America/Dawson America/Vancouver America/Panama America/Puerto_Rico America/Phoenix America/Blanc-Sablon America/Atikokan America/Creston","CC|Asia/Yangon Indian/Cocos","CD|Africa/Maputo Africa/Lagos Africa/Kinshasa Africa/Lubumbashi","CF|Africa/Lagos Africa/Bangui","CG|Africa/Lagos Africa/Brazzaville","CH|Europe/Zurich","CI|Africa/Abidjan","CK|Pacific/Rarotonga","CL|America/Santiago America/Punta_Arenas Pacific/Easter","CM|Africa/Lagos Africa/Douala","CN|Asia/Shanghai Asia/Urumqi","CO|America/Bogota","CR|America/Costa_Rica","CU|America/Havana","CV|Atlantic/Cape_Verde","CW|America/Puerto_Rico America/Curacao","CX|Asia/Bangkok Indian/Christmas","CY|Asia/Nicosia Asia/Famagusta","CZ|Europe/Prague","DE|Europe/Zurich Europe/Berlin Europe/Busingen","DJ|Africa/Nairobi Africa/Djibouti","DK|Europe/Berlin Europe/Copenhagen","DM|America/Puerto_Rico America/Dominica","DO|America/Santo_Domingo","DZ|Africa/Algiers","EC|America/Guayaquil Pacific/Galapagos","EE|Europe/Tallinn","EG|Africa/Cairo","EH|Africa/El_Aaiun","ER|Africa/Nairobi Africa/Asmara","ES|Europe/Madrid Africa/Ceuta Atlantic/Canary","ET|Africa/Nairobi Africa/Addis_Ababa","FI|Europe/Helsinki","FJ|Pacific/Fiji","FK|Atlantic/Stanley","FM|Pacific/Kosrae Pacific/Port_Moresby Pacific/Guadalcanal Pacific/Chuuk Pacific/Pohnpei","FO|Atlantic/Faroe","FR|Europe/Paris","GA|Africa/Lagos Africa/Libreville","GB|Europe/London","GD|America/Puerto_Rico America/Grenada","GE|Asia/Tbilisi","GF|America/Cayenne","GG|Europe/London Europe/Guernsey","GH|Africa/Abidjan Africa/Accra","GI|Europe/Gibraltar","GL|America/Nuuk America/Danmarkshavn America/Scoresbysund America/Thule","GM|Africa/Abidjan Africa/Banjul","GN|Africa/Abidjan Africa/Conakry","GP|America/Puerto_Rico America/Guadeloupe","GQ|Africa/Lagos Africa/Malabo","GR|Europe/Athens","GS|Atlantic/South_Georgia","GT|America/Guatemala","GU|Pacific/Guam","GW|Africa/Bissau","GY|America/Guyana","HK|Asia/Hong_Kong","HN|America/Tegucigalpa","HR|Europe/Belgrade Europe/Zagreb","HT|America/Port-au-Prince","HU|Europe/Budapest","ID|Asia/Jakarta Asia/Pontianak Asia/Makassar Asia/Jayapura","IE|Europe/Dublin","IL|Asia/Jerusalem","IM|Europe/London Europe/Isle_of_Man","IN|Asia/Kolkata","IO|Indian/Chagos","IQ|Asia/Baghdad","IR|Asia/Tehran","IS|Africa/Abidjan Atlantic/Reykjavik","IT|Europe/Rome","JE|Europe/London Europe/Jersey","JM|America/Jamaica","JO|Asia/Amman","JP|Asia/Tokyo","KE|Africa/Nairobi","KG|Asia/Bishkek","KH|Asia/Bangkok Asia/Phnom_Penh","KI|Pacific/Tarawa Pacific/Kanton Pacific/Kiritimati","KM|Africa/Nairobi Indian/Comoro","KN|America/Puerto_Rico 
America/St_Kitts","KP|Asia/Pyongyang","KR|Asia/Seoul","KW|Asia/Riyadh Asia/Kuwait","KY|America/Panama America/Cayman","KZ|Asia/Almaty Asia/Qyzylorda Asia/Qostanay Asia/Aqtobe Asia/Aqtau Asia/Atyrau Asia/Oral","LA|Asia/Bangkok Asia/Vientiane","LB|Asia/Beirut","LC|America/Puerto_Rico America/St_Lucia","LI|Europe/Zurich Europe/Vaduz","LK|Asia/Colombo","LR|Africa/Monrovia","LS|Africa/Johannesburg Africa/Maseru","LT|Europe/Vilnius","LU|Europe/Brussels Europe/Luxembourg","LV|Europe/Riga","LY|Africa/Tripoli","MA|Africa/Casablanca","MC|Europe/Paris Europe/Monaco","MD|Europe/Chisinau","ME|Europe/Belgrade Europe/Podgorica","MF|America/Puerto_Rico America/Marigot","MG|Africa/Nairobi Indian/Antananarivo","MH|Pacific/Tarawa Pacific/Kwajalein Pacific/Majuro","MK|Europe/Belgrade Europe/Skopje","ML|Africa/Abidjan Africa/Bamako","MM|Asia/Yangon","MN|Asia/Ulaanbaatar Asia/Hovd Asia/Choibalsan","MO|Asia/Macau","MP|Pacific/Guam Pacific/Saipan","MQ|America/Martinique","MR|Africa/Abidjan Africa/Nouakchott","MS|America/Puerto_Rico America/Montserrat","MT|Europe/Malta","MU|Indian/Mauritius","MV|Indian/Maldives","MW|Africa/Maputo Africa/Blantyre","MX|America/Mexico_City America/Cancun America/Merida America/Monterrey America/Matamoros America/Chihuahua America/Ciudad_Juarez America/Ojinaga America/Mazatlan America/Bahia_Banderas America/Hermosillo America/Tijuana","MY|Asia/Kuching Asia/Singapore Asia/Kuala_Lumpur","MZ|Africa/Maputo","NA|Africa/Windhoek","NC|Pacific/Noumea","NE|Africa/Lagos Africa/Niamey","NF|Pacific/Norfolk","NG|Africa/Lagos","NI|America/Managua","NL|Europe/Brussels Europe/Amsterdam","NO|Europe/Berlin Europe/Oslo","NP|Asia/Kathmandu","NR|Pacific/Nauru","NU|Pacific/Niue","NZ|Pacific/Auckland Pacific/Chatham","OM|Asia/Dubai Asia/Muscat","PA|America/Panama","PE|America/Lima","PF|Pacific/Tahiti Pacific/Marquesas Pacific/Gambier","PG|Pacific/Port_Moresby Pacific/Bougainville","PH|Asia/Manila","PK|Asia/Karachi","PL|Europe/Warsaw","PM|America/Miquelon","PN|Pacific/Pitcairn","PR|America/Puerto_Rico","PS|Asia/Gaza Asia/Hebron","PT|Europe/Lisbon Atlantic/Madeira Atlantic/Azores","PW|Pacific/Palau","PY|America/Asuncion","QA|Asia/Qatar","RE|Asia/Dubai Indian/Reunion","RO|Europe/Bucharest","RS|Europe/Belgrade","RU|Europe/Kaliningrad Europe/Moscow Europe/Simferopol Europe/Kirov Europe/Volgograd Europe/Astrakhan Europe/Saratov Europe/Ulyanovsk Europe/Samara Asia/Yekaterinburg Asia/Omsk Asia/Novosibirsk Asia/Barnaul Asia/Tomsk Asia/Novokuznetsk Asia/Krasnoyarsk Asia/Irkutsk Asia/Chita Asia/Yakutsk Asia/Khandyga Asia/Vladivostok Asia/Ust-Nera Asia/Magadan Asia/Sakhalin Asia/Srednekolymsk Asia/Kamchatka Asia/Anadyr","RW|Africa/Maputo Africa/Kigali","SA|Asia/Riyadh","SB|Pacific/Guadalcanal","SC|Asia/Dubai Indian/Mahe","SD|Africa/Khartoum","SE|Europe/Berlin Europe/Stockholm","SG|Asia/Singapore","SH|Africa/Abidjan Atlantic/St_Helena","SI|Europe/Belgrade Europe/Ljubljana","SJ|Europe/Berlin Arctic/Longyearbyen","SK|Europe/Prague Europe/Bratislava","SL|Africa/Abidjan Africa/Freetown","SM|Europe/Rome Europe/San_Marino","SN|Africa/Abidjan Africa/Dakar","SO|Africa/Nairobi Africa/Mogadishu","SR|America/Paramaribo","SS|Africa/Juba","ST|Africa/Sao_Tome","SV|America/El_Salvador","SX|America/Puerto_Rico America/Lower_Princes","SY|Asia/Damascus","SZ|Africa/Johannesburg Africa/Mbabane","TC|America/Grand_Turk","TD|Africa/Ndjamena","TF|Asia/Dubai Indian/Maldives Indian/Kerguelen","TG|Africa/Abidjan 
Africa/Lome","TH|Asia/Bangkok","TJ|Asia/Dushanbe","TK|Pacific/Fakaofo","TL|Asia/Dili","TM|Asia/Ashgabat","TN|Africa/Tunis","TO|Pacific/Tongatapu","TR|Europe/Istanbul","TT|America/Puerto_Rico America/Port_of_Spain","TV|Pacific/Tarawa Pacific/Funafuti","TW|Asia/Taipei","TZ|Africa/Nairobi Africa/Dar_es_Salaam","UA|Europe/Simferopol Europe/Kyiv","UG|Africa/Nairobi Africa/Kampala","UM|Pacific/Pago_Pago Pacific/Tarawa Pacific/Midway Pacific/Wake","US|America/New_York America/Detroit America/Kentucky/Louisville America/Kentucky/Monticello America/Indiana/Indianapolis America/Indiana/Vincennes America/Indiana/Winamac America/Indiana/Marengo America/Indiana/Petersburg America/Indiana/Vevay America/Chicago America/Indiana/Tell_City America/Indiana/Knox America/Menominee America/North_Dakota/Center America/North_Dakota/New_Salem America/North_Dakota/Beulah America/Denver America/Boise America/Phoenix America/Los_Angeles America/Anchorage America/Juneau America/Sitka America/Metlakatla America/Yakutat America/Nome America/Adak Pacific/Honolulu","UY|America/Montevideo","UZ|Asia/Samarkand Asia/Tashkent","VA|Europe/Rome Europe/Vatican","VC|America/Puerto_Rico America/St_Vincent","VE|America/Caracas","VG|America/Puerto_Rico America/Tortola","VI|America/Puerto_Rico America/St_Thomas","VN|Asia/Bangkok Asia/Ho_Chi_Minh","VU|Pacific/Efate","WF|Pacific/Tarawa Pacific/Wallis","WS|Pacific/Apia","YE|Asia/Riyadh Asia/Aden","YT|Africa/Nairobi Indian/Mayotte","ZA|Africa/Johannesburg","ZM|Africa/Maputo Africa/Lusaka","ZW|Africa/Maputo Africa/Harare"]}),O}); \ No newline at end of file diff --git a/Products/ZenUI3/browser/resources/js/timezone/moment.min.js b/Products/ZenUI3/browser/resources/js/timezone/moment.min.js index 8e6866af04..8b80f200c0 100644 --- a/Products/ZenUI3/browser/resources/js/timezone/moment.min.js +++ b/Products/ZenUI3/browser/resources/js/timezone/moment.min.js @@ -1,7 +1,2 @@ -//! moment.js -//! version : 2.10.6 -//! authors : Tim Wood, Iskren Chernev, Moment.js contributors -//! license : MIT -//! 
momentjs.com -!function(a,b){"object"==typeof exports&&"undefined"!=typeof module?module.exports=b():"function"==typeof define&&define.amd?define(b):a.moment=b()}(this,function(){"use strict";function a(){return Hc.apply(null,arguments)}function b(a){Hc=a}function c(a){return"[object Array]"===Object.prototype.toString.call(a)}function d(a){return a instanceof Date||"[object Date]"===Object.prototype.toString.call(a)}function e(a,b){var c,d=[];for(c=0;c0)for(c in Jc)d=Jc[c],e=b[d],"undefined"!=typeof e&&(a[d]=e);return a}function n(b){m(this,b),this._d=new Date(null!=b._d?b._d.getTime():NaN),Kc===!1&&(Kc=!0,a.updateOffset(this),Kc=!1)}function o(a){return a instanceof n||null!=a&&null!=a._isAMomentObject}function p(a){return 0>a?Math.ceil(a):Math.floor(a)}function q(a){var b=+a,c=0;return 0!==b&&isFinite(b)&&(c=p(b)),c}function r(a,b,c){var d,e=Math.min(a.length,b.length),f=Math.abs(a.length-b.length),g=0;for(d=0;e>d;d++)(c&&a[d]!==b[d]||!c&&q(a[d])!==q(b[d]))&&g++;return g+f}function s(){}function t(a){return a?a.toLowerCase().replace("_","-"):a}function u(a){for(var b,c,d,e,f=0;f0;){if(d=v(e.slice(0,b).join("-")))return d;if(c&&c.length>=b&&r(e,c,!0)>=b-1)break;b--}f++}return null}function v(a){var b=null;if(!Lc[a]&&"undefined"!=typeof module&&module&&module.exports)try{b=Ic._abbr,require("./locale/"+a),w(b)}catch(c){}return Lc[a]}function w(a,b){var c;return a&&(c="undefined"==typeof b?y(a):x(a,b),c&&(Ic=c)),Ic._abbr}function x(a,b){return null!==b?(b.abbr=a,Lc[a]=Lc[a]||new s,Lc[a].set(b),w(a),Lc[a]):(delete Lc[a],null)}function y(a){var b;if(a&&a._locale&&a._locale._abbr&&(a=a._locale._abbr),!a)return Ic;if(!c(a)){if(b=v(a))return b;a=[a]}return u(a)}function z(a,b){var c=a.toLowerCase();Mc[c]=Mc[c+"s"]=Mc[b]=a}function A(a){return"string"==typeof a?Mc[a]||Mc[a.toLowerCase()]:void 0}function B(a){var b,c,d={};for(c in a)f(a,c)&&(b=A(c),b&&(d[b]=a[c]));return d}function C(b,c){return function(d){return null!=d?(E(this,b,d),a.updateOffset(this,c),this):D(this,b)}}function D(a,b){return a._d["get"+(a._isUTC?"UTC":"")+b]()}function E(a,b,c){return a._d["set"+(a._isUTC?"UTC":"")+b](c)}function F(a,b){var c;if("object"==typeof a)for(c in a)this.set(c,a[c]);else if(a=A(a),"function"==typeof this[a])return this[a](b);return this}function G(a,b,c){var d=""+Math.abs(a),e=b-d.length,f=a>=0;return(f?c?"+":"":"-")+Math.pow(10,Math.max(0,e)).toString().substr(1)+d}function H(a,b,c,d){var e=d;"string"==typeof d&&(e=function(){return this[d]()}),a&&(Qc[a]=e),b&&(Qc[b[0]]=function(){return G(e.apply(this,arguments),b[1],b[2])}),c&&(Qc[c]=function(){return this.localeData().ordinal(e.apply(this,arguments),a)})}function I(a){return a.match(/\[[\s\S]/)?a.replace(/^\[|\]$/g,""):a.replace(/\\/g,"")}function J(a){var b,c,d=a.match(Nc);for(b=0,c=d.length;c>b;b++)Qc[d[b]]?d[b]=Qc[d[b]]:d[b]=I(d[b]);return function(e){var f="";for(b=0;c>b;b++)f+=d[b]instanceof Function?d[b].call(e,a):d[b];return f}}function K(a,b){return a.isValid()?(b=L(b,a.localeData()),Pc[b]=Pc[b]||J(b),Pc[b](a)):a.localeData().invalidDate()}function L(a,b){function c(a){return b.longDateFormat(a)||a}var d=5;for(Oc.lastIndex=0;d>=0&&Oc.test(a);)a=a.replace(Oc,c),Oc.lastIndex=0,d-=1;return a}function M(a){return"function"==typeof a&&"[object Function]"===Object.prototype.toString.call(a)}function N(a,b,c){dd[a]=M(b)?b:function(a){return a&&c?c:b}}function O(a,b){return f(dd,a)?dd[a](b._strict,b._locale):new RegExp(P(a))}function P(a){return a.replace("\\","").replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g,function(a,b,c,d,e){return 
b||c||d||e}).replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&")}function Q(a,b){var c,d=b;for("string"==typeof a&&(a=[a]),"number"==typeof b&&(d=function(a,c){c[b]=q(a)}),c=0;cd;d++){if(e=h([2e3,d]),c&&!this._longMonthsParse[d]&&(this._longMonthsParse[d]=new RegExp("^"+this.months(e,"").replace(".","")+"$","i"),this._shortMonthsParse[d]=new RegExp("^"+this.monthsShort(e,"").replace(".","")+"$","i")),c||this._monthsParse[d]||(f="^"+this.months(e,"")+"|^"+this.monthsShort(e,""),this._monthsParse[d]=new RegExp(f.replace(".",""),"i")),c&&"MMMM"===b&&this._longMonthsParse[d].test(a))return d;if(c&&"MMM"===b&&this._shortMonthsParse[d].test(a))return d;if(!c&&this._monthsParse[d].test(a))return d}}function X(a,b){var c;return"string"==typeof b&&(b=a.localeData().monthsParse(b),"number"!=typeof b)?a:(c=Math.min(a.date(),T(a.year(),b)),a._d["set"+(a._isUTC?"UTC":"")+"Month"](b,c),a)}function Y(b){return null!=b?(X(this,b),a.updateOffset(this,!0),this):D(this,"Month")}function Z(){return T(this.year(),this.month())}function $(a){var b,c=a._a;return c&&-2===j(a).overflow&&(b=c[gd]<0||c[gd]>11?gd:c[hd]<1||c[hd]>T(c[fd],c[gd])?hd:c[id]<0||c[id]>24||24===c[id]&&(0!==c[jd]||0!==c[kd]||0!==c[ld])?id:c[jd]<0||c[jd]>59?jd:c[kd]<0||c[kd]>59?kd:c[ld]<0||c[ld]>999?ld:-1,j(a)._overflowDayOfYear&&(fd>b||b>hd)&&(b=hd),j(a).overflow=b),a}function _(b){a.suppressDeprecationWarnings===!1&&"undefined"!=typeof console&&console.warn&&console.warn("Deprecation warning: "+b)}function aa(a,b){var c=!0;return g(function(){return c&&(_(a+"\n"+(new Error).stack),c=!1),b.apply(this,arguments)},b)}function ba(a,b){od[a]||(_(b),od[a]=!0)}function ca(a){var b,c,d=a._i,e=pd.exec(d);if(e){for(j(a).iso=!0,b=0,c=qd.length;c>b;b++)if(qd[b][1].exec(d)){a._f=qd[b][0];break}for(b=0,c=rd.length;c>b;b++)if(rd[b][1].exec(d)){a._f+=(e[6]||" ")+rd[b][0];break}d.match(ad)&&(a._f+="Z"),va(a)}else a._isValid=!1}function da(b){var c=sd.exec(b._i);return null!==c?void(b._d=new Date(+c[1])):(ca(b),void(b._isValid===!1&&(delete b._isValid,a.createFromInputFallback(b))))}function ea(a,b,c,d,e,f,g){var h=new Date(a,b,c,d,e,f,g);return 1970>a&&h.setFullYear(a),h}function fa(a){var b=new Date(Date.UTC.apply(null,arguments));return 1970>a&&b.setUTCFullYear(a),b}function ga(a){return ha(a)?366:365}function ha(a){return a%4===0&&a%100!==0||a%400===0}function ia(){return ha(this.year())}function ja(a,b,c){var d,e=c-b,f=c-a.day();return f>e&&(f-=7),e-7>f&&(f+=7),d=Da(a).add(f,"d"),{week:Math.ceil(d.dayOfYear()/7),year:d.year()}}function ka(a){return ja(a,this._week.dow,this._week.doy).week}function la(){return this._week.dow}function ma(){return this._week.doy}function na(a){var b=this.localeData().week(this);return null==a?b:this.add(7*(a-b),"d")}function oa(a){var b=ja(this,1,4).week;return null==a?b:this.add(7*(a-b),"d")}function pa(a,b,c,d,e){var f,g=6+e-d,h=fa(a,0,1+g),i=h.getUTCDay();return e>i&&(i+=7),c=null!=c?1*c:e,f=1+g+7*(b-1)-i+c,{year:f>0?a:a-1,dayOfYear:f>0?f:ga(a-1)+f}}function qa(a){var b=Math.round((this.clone().startOf("day")-this.clone().startOf("year"))/864e5)+1;return null==a?b:this.add(a-b,"d")}function ra(a,b,c){return null!=a?a:null!=b?b:c}function sa(a){var b=new Date;return a._useUTC?[b.getUTCFullYear(),b.getUTCMonth(),b.getUTCDate()]:[b.getFullYear(),b.getMonth(),b.getDate()]}function ta(a){var 
b,c,d,e,f=[];if(!a._d){for(d=sa(a),a._w&&null==a._a[hd]&&null==a._a[gd]&&ua(a),a._dayOfYear&&(e=ra(a._a[fd],d[fd]),a._dayOfYear>ga(e)&&(j(a)._overflowDayOfYear=!0),c=fa(e,0,a._dayOfYear),a._a[gd]=c.getUTCMonth(),a._a[hd]=c.getUTCDate()),b=0;3>b&&null==a._a[b];++b)a._a[b]=f[b]=d[b];for(;7>b;b++)a._a[b]=f[b]=null==a._a[b]?2===b?1:0:a._a[b];24===a._a[id]&&0===a._a[jd]&&0===a._a[kd]&&0===a._a[ld]&&(a._nextDay=!0,a._a[id]=0),a._d=(a._useUTC?fa:ea).apply(null,f),null!=a._tzm&&a._d.setUTCMinutes(a._d.getUTCMinutes()-a._tzm),a._nextDay&&(a._a[id]=24)}}function ua(a){var b,c,d,e,f,g,h;b=a._w,null!=b.GG||null!=b.W||null!=b.E?(f=1,g=4,c=ra(b.GG,a._a[fd],ja(Da(),1,4).year),d=ra(b.W,1),e=ra(b.E,1)):(f=a._locale._week.dow,g=a._locale._week.doy,c=ra(b.gg,a._a[fd],ja(Da(),f,g).year),d=ra(b.w,1),null!=b.d?(e=b.d,f>e&&++d):e=null!=b.e?b.e+f:f),h=pa(c,d,e,g,f),a._a[fd]=h.year,a._dayOfYear=h.dayOfYear}function va(b){if(b._f===a.ISO_8601)return void ca(b);b._a=[],j(b).empty=!0;var c,d,e,f,g,h=""+b._i,i=h.length,k=0;for(e=L(b._f,b._locale).match(Nc)||[],c=0;c0&&j(b).unusedInput.push(g),h=h.slice(h.indexOf(d)+d.length),k+=d.length),Qc[f]?(d?j(b).empty=!1:j(b).unusedTokens.push(f),S(f,d,b)):b._strict&&!d&&j(b).unusedTokens.push(f);j(b).charsLeftOver=i-k,h.length>0&&j(b).unusedInput.push(h),j(b).bigHour===!0&&b._a[id]<=12&&b._a[id]>0&&(j(b).bigHour=void 0),b._a[id]=wa(b._locale,b._a[id],b._meridiem),ta(b),$(b)}function wa(a,b,c){var d;return null==c?b:null!=a.meridiemHour?a.meridiemHour(b,c):null!=a.isPM?(d=a.isPM(c),d&&12>b&&(b+=12),d||12!==b||(b=0),b):b}function xa(a){var b,c,d,e,f;if(0===a._f.length)return j(a).invalidFormat=!0,void(a._d=new Date(NaN));for(e=0;ef)&&(d=f,c=b));g(a,c||b)}function ya(a){if(!a._d){var b=B(a._i);a._a=[b.year,b.month,b.day||b.date,b.hour,b.minute,b.second,b.millisecond],ta(a)}}function za(a){var b=new n($(Aa(a)));return b._nextDay&&(b.add(1,"d"),b._nextDay=void 0),b}function Aa(a){var b=a._i,e=a._f;return a._locale=a._locale||y(a._l),null===b||void 0===e&&""===b?l({nullInput:!0}):("string"==typeof b&&(a._i=b=a._locale.preparse(b)),o(b)?new n($(b)):(c(e)?xa(a):e?va(a):d(b)?a._d=b:Ba(a),a))}function Ba(b){var f=b._i;void 0===f?b._d=new Date:d(f)?b._d=new Date(+f):"string"==typeof f?da(b):c(f)?(b._a=e(f.slice(0),function(a){return parseInt(a,10)}),ta(b)):"object"==typeof f?ya(b):"number"==typeof f?b._d=new Date(f):a.createFromInputFallback(b)}function Ca(a,b,c,d,e){var f={};return"boolean"==typeof c&&(d=c,c=void 0),f._isAMomentObject=!0,f._useUTC=f._isUTC=e,f._l=c,f._i=a,f._f=b,f._strict=d,za(f)}function Da(a,b,c,d){return Ca(a,b,c,d,!1)}function Ea(a,b){var d,e;if(1===b.length&&c(b[0])&&(b=b[0]),!b.length)return Da();for(d=b[0],e=1;ea&&(a=-a,c="-"),c+G(~~(a/60),2)+b+G(~~a%60,2)})}function Ka(a){var b=(a||"").match(ad)||[],c=b[b.length-1]||[],d=(c+"").match(xd)||["-",0,0],e=+(60*d[1])+q(d[2]);return"+"===d[0]?e:-e}function La(b,c){var e,f;return c._isUTC?(e=c.clone(),f=(o(b)||d(b)?+b:+Da(b))-+e,e._d.setTime(+e._d+f),a.updateOffset(e,!1),e):Da(b).local()}function Ma(a){return 15*-Math.round(a._d.getTimezoneOffset()/15)}function Na(b,c){var d,e=this._offset||0;return null!=b?("string"==typeof b&&(b=Ka(b)),Math.abs(b)<16&&(b=60*b),!this._isUTC&&c&&(d=Ma(this)),this._offset=b,this._isUTC=!0,null!=d&&this.add(d,"m"),e!==b&&(!c||this._changeInProgress?bb(this,Ya(b-e,"m"),1,!1):this._changeInProgress||(this._changeInProgress=!0,a.updateOffset(this,!0),this._changeInProgress=null)),this):this._isUTC?e:Ma(this)}function Oa(a,b){return null!=a?("string"!=typeof 
a&&(a=-a),this.utcOffset(a,b),this):-this.utcOffset()}function Pa(a){return this.utcOffset(0,a)}function Qa(a){return this._isUTC&&(this.utcOffset(0,a),this._isUTC=!1,a&&this.subtract(Ma(this),"m")),this}function Ra(){return this._tzm?this.utcOffset(this._tzm):"string"==typeof this._i&&this.utcOffset(Ka(this._i)),this}function Sa(a){return a=a?Da(a).utcOffset():0,(this.utcOffset()-a)%60===0}function Ta(){return this.utcOffset()>this.clone().month(0).utcOffset()||this.utcOffset()>this.clone().month(5).utcOffset()}function Ua(){if("undefined"!=typeof this._isDSTShifted)return this._isDSTShifted;var a={};if(m(a,this),a=Aa(a),a._a){var b=a._isUTC?h(a._a):Da(a._a);this._isDSTShifted=this.isValid()&&r(a._a,b.toArray())>0}else this._isDSTShifted=!1;return this._isDSTShifted}function Va(){return!this._isUTC}function Wa(){return this._isUTC}function Xa(){return this._isUTC&&0===this._offset}function Ya(a,b){var c,d,e,g=a,h=null;return Ia(a)?g={ms:a._milliseconds,d:a._days,M:a._months}:"number"==typeof a?(g={},b?g[b]=a:g.milliseconds=a):(h=yd.exec(a))?(c="-"===h[1]?-1:1,g={y:0,d:q(h[hd])*c,h:q(h[id])*c,m:q(h[jd])*c,s:q(h[kd])*c,ms:q(h[ld])*c}):(h=zd.exec(a))?(c="-"===h[1]?-1:1,g={y:Za(h[2],c),M:Za(h[3],c),d:Za(h[4],c),h:Za(h[5],c),m:Za(h[6],c),s:Za(h[7],c),w:Za(h[8],c)}):null==g?g={}:"object"==typeof g&&("from"in g||"to"in g)&&(e=_a(Da(g.from),Da(g.to)),g={},g.ms=e.milliseconds,g.M=e.months),d=new Ha(g),Ia(a)&&f(a,"_locale")&&(d._locale=a._locale),d}function Za(a,b){var c=a&&parseFloat(a.replace(",","."));return(isNaN(c)?0:c)*b}function $a(a,b){var c={milliseconds:0,months:0};return c.months=b.month()-a.month()+12*(b.year()-a.year()),a.clone().add(c.months,"M").isAfter(b)&&--c.months,c.milliseconds=+b-+a.clone().add(c.months,"M"),c}function _a(a,b){var c;return b=La(b,a),a.isBefore(b)?c=$a(a,b):(c=$a(b,a),c.milliseconds=-c.milliseconds,c.months=-c.months),c}function ab(a,b){return function(c,d){var e,f;return null===d||isNaN(+d)||(ba(b,"moment()."+b+"(period, number) is deprecated. 
Please use moment()."+b+"(number, period)."),f=c,c=d,d=f),c="string"==typeof c?+c:c,e=Ya(c,d),bb(this,e,a),this}}function bb(b,c,d,e){var f=c._milliseconds,g=c._days,h=c._months;e=null==e?!0:e,f&&b._d.setTime(+b._d+f*d),g&&E(b,"Date",D(b,"Date")+g*d),h&&X(b,D(b,"Month")+h*d),e&&a.updateOffset(b,g||h)}function cb(a,b){var c=a||Da(),d=La(c,this).startOf("day"),e=this.diff(d,"days",!0),f=-6>e?"sameElse":-1>e?"lastWeek":0>e?"lastDay":1>e?"sameDay":2>e?"nextDay":7>e?"nextWeek":"sameElse";return this.format(b&&b[f]||this.localeData().calendar(f,this,Da(c)))}function db(){return new n(this)}function eb(a,b){var c;return b=A("undefined"!=typeof b?b:"millisecond"),"millisecond"===b?(a=o(a)?a:Da(a),+this>+a):(c=o(a)?+a:+Da(a),c<+this.clone().startOf(b))}function fb(a,b){var c;return b=A("undefined"!=typeof b?b:"millisecond"),"millisecond"===b?(a=o(a)?a:Da(a),+a>+this):(c=o(a)?+a:+Da(a),+this.clone().endOf(b)b-f?(c=a.clone().add(e-1,"months"),d=(b-f)/(f-c)):(c=a.clone().add(e+1,"months"),d=(b-f)/(c-f)),-(e+d)}function kb(){return this.clone().locale("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")}function lb(){var a=this.clone().utc();return 0b;b++)if(this._weekdaysParse[b]||(c=Da([2e3,1]).day(b),d="^"+this.weekdays(c,"")+"|^"+this.weekdaysShort(c,"")+"|^"+this.weekdaysMin(c,""),this._weekdaysParse[b]=new RegExp(d.replace(".",""),"i")),this._weekdaysParse[b].test(a))return b}function Pb(a){var b=this._isUTC?this._d.getUTCDay():this._d.getDay();return null!=a?(a=Kb(a,this.localeData()),this.add(a-b,"d")):b}function Qb(a){var b=(this.day()+7-this.localeData()._week.dow)%7;return null==a?b:this.add(a-b,"d")}function Rb(a){return null==a?this.day()||7:this.day(this.day()%7?a:a-7)}function Sb(a,b){H(a,0,0,function(){return this.localeData().meridiem(this.hours(),this.minutes(),b)})}function Tb(a,b){return b._meridiemParse}function Ub(a){return"p"===(a+"").toLowerCase().charAt(0)}function Vb(a,b,c){return a>11?c?"pm":"PM":c?"am":"AM"}function Wb(a,b){b[ld]=q(1e3*("0."+a))}function Xb(){return this._isUTC?"UTC":""}function Yb(){return this._isUTC?"Coordinated Universal Time":""}function Zb(a){return Da(1e3*a)}function $b(){return Da.apply(null,arguments).parseZone()}function _b(a,b,c){var d=this._calendar[a];return"function"==typeof d?d.call(b,c):d}function ac(a){var b=this._longDateFormat[a],c=this._longDateFormat[a.toUpperCase()];return b||!c?b:(this._longDateFormat[a]=c.replace(/MMMM|MM|DD|dddd/g,function(a){return a.slice(1)}),this._longDateFormat[a])}function bc(){return this._invalidDate}function cc(a){return this._ordinal.replace("%d",a)}function dc(a){return a}function ec(a,b,c,d){var e=this._relativeTime[c];return"function"==typeof e?e(a,b,c,d):e.replace(/%d/i,a)}function fc(a,b){var c=this._relativeTime[a>0?"future":"past"];return"function"==typeof c?c(b):c.replace(/%s/i,b)}function gc(a){var b,c;for(c in a)b=a[c],"function"==typeof b?this[c]=b:this["_"+c]=b;this._ordinalParseLenient=new RegExp(this._ordinalParse.source+"|"+/\d{1,2}/.source)}function hc(a,b,c,d){var e=y(),f=h().set(d,b);return e[c](f,a)}function ic(a,b,c,d,e){if("number"==typeof a&&(b=a,a=void 0),a=a||"",null!=b)return hc(a,b,c,e);var f,g=[];for(f=0;d>f;f++)g[f]=hc(a,f,c,e);return g}function jc(a,b){return ic(a,b,"months",12,"month")}function kc(a,b){return ic(a,b,"monthsShort",12,"month")}function lc(a,b){return ic(a,b,"weekdays",7,"day")}function mc(a,b){return ic(a,b,"weekdaysShort",7,"day")}function nc(a,b){return ic(a,b,"weekdaysMin",7,"day")}function oc(){var a=this._data;return 
this._milliseconds=Wd(this._milliseconds),this._days=Wd(this._days),this._months=Wd(this._months),a.milliseconds=Wd(a.milliseconds),a.seconds=Wd(a.seconds),a.minutes=Wd(a.minutes),a.hours=Wd(a.hours),a.months=Wd(a.months),a.years=Wd(a.years),this}function pc(a,b,c,d){var e=Ya(b,c);return a._milliseconds+=d*e._milliseconds,a._days+=d*e._days,a._months+=d*e._months,a._bubble()}function qc(a,b){return pc(this,a,b,1)}function rc(a,b){return pc(this,a,b,-1)}function sc(a){return 0>a?Math.floor(a):Math.ceil(a)}function tc(){var a,b,c,d,e,f=this._milliseconds,g=this._days,h=this._months,i=this._data;return f>=0&&g>=0&&h>=0||0>=f&&0>=g&&0>=h||(f+=864e5*sc(vc(h)+g),g=0,h=0),i.milliseconds=f%1e3,a=p(f/1e3),i.seconds=a%60,b=p(a/60),i.minutes=b%60,c=p(b/60),i.hours=c%24,g+=p(c/24),e=p(uc(g)),h+=e,g-=sc(vc(e)),d=p(h/12),h%=12,i.days=g,i.months=h,i.years=d,this}function uc(a){return 4800*a/146097}function vc(a){return 146097*a/4800}function wc(a){var b,c,d=this._milliseconds;if(a=A(a),"month"===a||"year"===a)return b=this._days+d/864e5,c=this._months+uc(b),"month"===a?c:c/12;switch(b=this._days+Math.round(vc(this._months)),a){case"week":return b/7+d/6048e5;case"day":return b+d/864e5;case"hour":return 24*b+d/36e5;case"minute":return 1440*b+d/6e4;case"second":return 86400*b+d/1e3;case"millisecond":return Math.floor(864e5*b)+d;default:throw new Error("Unknown unit "+a)}}function xc(){return this._milliseconds+864e5*this._days+this._months%12*2592e6+31536e6*q(this._months/12)}function yc(a){return function(){return this.as(a)}}function zc(a){return a=A(a),this[a+"s"]()}function Ac(a){return function(){return this._data[a]}}function Bc(){return p(this.days()/7)}function Cc(a,b,c,d,e){return e.relativeTime(b||1,!!c,a,d)}function Dc(a,b,c){var d=Ya(a).abs(),e=ke(d.as("s")),f=ke(d.as("m")),g=ke(d.as("h")),h=ke(d.as("d")),i=ke(d.as("M")),j=ke(d.as("y")),k=e0,k[4]=c,Cc.apply(null,k)}function Ec(a,b){return void 0===le[a]?!1:void 0===b?le[a]:(le[a]=b,!0)}function Fc(a){var b=this.localeData(),c=Dc(this,!a,b);return a&&(c=b.pastFuture(+this,c)),b.postformat(c)}function Gc(){var a,b,c,d=me(this._milliseconds)/1e3,e=me(this._days),f=me(this._months);a=p(d/60),b=p(a/60),d%=60,a%=60,c=p(f/12),f%=12;var g=c,h=f,i=e,j=b,k=a,l=d,m=this.asSeconds();return m?(0>m?"-":"")+"P"+(g?g+"Y":"")+(h?h+"M":"")+(i?i+"D":"")+(j||k||l?"T":"")+(j?j+"H":"")+(k?k+"M":"")+(l?l+"S":""):"P0D"}var Hc,Ic,Jc=a.momentProperties=[],Kc=!1,Lc={},Mc={},Nc=/(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Q|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|S{1,9}|x|X|zz?|ZZ?|.)/g,Oc=/(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g,Pc={},Qc={},Rc=/\d/,Sc=/\d\d/,Tc=/\d{3}/,Uc=/\d{4}/,Vc=/[+-]?\d{6}/,Wc=/\d\d?/,Xc=/\d{1,3}/,Yc=/\d{1,4}/,Zc=/[+-]?\d{1,6}/,$c=/\d+/,_c=/[+-]?\d+/,ad=/Z|[+-]\d\d:?\d\d/gi,bd=/[+-]?\d+(\.\d{1,3})?/,cd=/[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i,dd={},ed={},fd=0,gd=1,hd=2,id=3,jd=4,kd=5,ld=6;H("M",["MM",2],"Mo",function(){return this.month()+1}),H("MMM",0,0,function(a){return this.localeData().monthsShort(this,a)}),H("MMMM",0,0,function(a){return this.localeData().months(this,a)}),z("month","M"),N("M",Wc),N("MM",Wc,Sc),N("MMM",cd),N("MMMM",cd),Q(["M","MM"],function(a,b){b[gd]=q(a)-1}),Q(["MMM","MMMM"],function(a,b,c,d){var e=c._locale.monthsParse(a,d,c._strict);null!=e?b[gd]=e:j(c).invalidMonth=a});var 
md="January_February_March_April_May_June_July_August_September_October_November_December".split("_"),nd="Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),od={};a.suppressDeprecationWarnings=!1;var pd=/^\s*(?:[+-]\d{6}|\d{4})-(?:(\d\d-\d\d)|(W\d\d$)|(W\d\d-\d)|(\d\d\d))((T| )(\d\d(:\d\d(:\d\d(\.\d+)?)?)?)?([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/,qd=[["YYYYYY-MM-DD",/[+-]\d{6}-\d{2}-\d{2}/],["YYYY-MM-DD",/\d{4}-\d{2}-\d{2}/],["GGGG-[W]WW-E",/\d{4}-W\d{2}-\d/],["GGGG-[W]WW",/\d{4}-W\d{2}/],["YYYY-DDD",/\d{4}-\d{3}/]],rd=[["HH:mm:ss.SSSS",/(T| )\d\d:\d\d:\d\d\.\d+/],["HH:mm:ss",/(T| )\d\d:\d\d:\d\d/],["HH:mm",/(T| )\d\d:\d\d/],["HH",/(T| )\d\d/]],sd=/^\/?Date\((\-?\d+)/i;a.createFromInputFallback=aa("moment construction falls back to js Date. This is discouraged and will be removed in upcoming major release. Please refer to https://github.com/moment/moment/issues/1407 for more info.",function(a){a._d=new Date(a._i+(a._useUTC?" UTC":""))}),H(0,["YY",2],0,function(){return this.year()%100}),H(0,["YYYY",4],0,"year"),H(0,["YYYYY",5],0,"year"),H(0,["YYYYYY",6,!0],0,"year"),z("year","y"),N("Y",_c),N("YY",Wc,Sc),N("YYYY",Yc,Uc),N("YYYYY",Zc,Vc),N("YYYYYY",Zc,Vc),Q(["YYYYY","YYYYYY"],fd),Q("YYYY",function(b,c){c[fd]=2===b.length?a.parseTwoDigitYear(b):q(b)}),Q("YY",function(b,c){c[fd]=a.parseTwoDigitYear(b)}),a.parseTwoDigitYear=function(a){return q(a)+(q(a)>68?1900:2e3)};var td=C("FullYear",!1);H("w",["ww",2],"wo","week"),H("W",["WW",2],"Wo","isoWeek"),z("week","w"),z("isoWeek","W"),N("w",Wc),N("ww",Wc,Sc),N("W",Wc),N("WW",Wc,Sc),R(["w","ww","W","WW"],function(a,b,c,d){b[d.substr(0,1)]=q(a)});var ud={dow:0,doy:6};H("DDD",["DDDD",3],"DDDo","dayOfYear"),z("dayOfYear","DDD"),N("DDD",Xc),N("DDDD",Tc),Q(["DDD","DDDD"],function(a,b,c){c._dayOfYear=q(a)}),a.ISO_8601=function(){};var vd=aa("moment().min is deprecated, use moment.min instead. https://github.com/moment/moment/issues/1548",function(){var a=Da.apply(null,arguments);return this>a?this:a}),wd=aa("moment().max is deprecated, use moment.max instead. https://github.com/moment/moment/issues/1548",function(){var a=Da.apply(null,arguments);return a>this?this:a});Ja("Z",":"),Ja("ZZ",""),N("Z",ad),N("ZZ",ad),Q(["Z","ZZ"],function(a,b,c){c._useUTC=!0,c._tzm=Ka(a)});var xd=/([\+\-]|\d\d)/gi;a.updateOffset=function(){};var yd=/(\-)?(?:(\d*)\.)?(\d+)\:(\d+)(?:\:(\d+)\.?(\d{3})?)?/,zd=/^(-)?P(?:(?:([0-9,.]*)Y)?(?:([0-9,.]*)M)?(?:([0-9,.]*)D)?(?:T(?:([0-9,.]*)H)?(?:([0-9,.]*)M)?(?:([0-9,.]*)S)?)?|([0-9,.]*)W)$/;Ya.fn=Ha.prototype;var Ad=ab(1,"add"),Bd=ab(-1,"subtract");a.defaultFormat="YYYY-MM-DDTHH:mm:ssZ";var Cd=aa("moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. 
Use moment().locale() to change languages.",function(a){return void 0===a?this.localeData():this.locale(a)});H(0,["gg",2],0,function(){return this.weekYear()%100}),H(0,["GG",2],0,function(){return this.isoWeekYear()%100}),Db("gggg","weekYear"),Db("ggggg","weekYear"),Db("GGGG","isoWeekYear"),Db("GGGGG","isoWeekYear"),z("weekYear","gg"),z("isoWeekYear","GG"),N("G",_c),N("g",_c),N("GG",Wc,Sc),N("gg",Wc,Sc),N("GGGG",Yc,Uc),N("gggg",Yc,Uc),N("GGGGG",Zc,Vc),N("ggggg",Zc,Vc),R(["gggg","ggggg","GGGG","GGGGG"],function(a,b,c,d){b[d.substr(0,2)]=q(a)}),R(["gg","GG"],function(b,c,d,e){c[e]=a.parseTwoDigitYear(b)}),H("Q",0,0,"quarter"),z("quarter","Q"),N("Q",Rc),Q("Q",function(a,b){b[gd]=3*(q(a)-1)}),H("D",["DD",2],"Do","date"),z("date","D"),N("D",Wc),N("DD",Wc,Sc),N("Do",function(a,b){return a?b._ordinalParse:b._ordinalParseLenient}),Q(["D","DD"],hd),Q("Do",function(a,b){b[hd]=q(a.match(Wc)[0],10)});var Dd=C("Date",!0);H("d",0,"do","day"),H("dd",0,0,function(a){return this.localeData().weekdaysMin(this,a)}),H("ddd",0,0,function(a){return this.localeData().weekdaysShort(this,a)}),H("dddd",0,0,function(a){return this.localeData().weekdays(this,a)}),H("e",0,0,"weekday"),H("E",0,0,"isoWeekday"),z("day","d"),z("weekday","e"),z("isoWeekday","E"),N("d",Wc),N("e",Wc),N("E",Wc),N("dd",cd),N("ddd",cd),N("dddd",cd),R(["dd","ddd","dddd"],function(a,b,c){var d=c._locale.weekdaysParse(a);null!=d?b.d=d:j(c).invalidWeekday=a}),R(["d","e","E"],function(a,b,c,d){b[d]=q(a)});var Ed="Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),Fd="Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),Gd="Su_Mo_Tu_We_Th_Fr_Sa".split("_");H("H",["HH",2],0,"hour"),H("h",["hh",2],0,function(){return this.hours()%12||12}),Sb("a",!0),Sb("A",!1),z("hour","h"),N("a",Tb),N("A",Tb),N("H",Wc),N("h",Wc),N("HH",Wc,Sc),N("hh",Wc,Sc),Q(["H","HH"],id),Q(["a","A"],function(a,b,c){c._isPm=c._locale.isPM(a),c._meridiem=a}),Q(["h","hh"],function(a,b,c){b[id]=q(a),j(c).bigHour=!0});var Hd=/[ap]\.?m?\.?/i,Id=C("Hours",!0);H("m",["mm",2],0,"minute"),z("minute","m"),N("m",Wc),N("mm",Wc,Sc),Q(["m","mm"],jd);var Jd=C("Minutes",!1);H("s",["ss",2],0,"second"),z("second","s"),N("s",Wc),N("ss",Wc,Sc),Q(["s","ss"],kd);var Kd=C("Seconds",!1);H("S",0,0,function(){return~~(this.millisecond()/100)}),H(0,["SS",2],0,function(){return~~(this.millisecond()/10)}),H(0,["SSS",3],0,"millisecond"),H(0,["SSSS",4],0,function(){return 10*this.millisecond()}),H(0,["SSSSS",5],0,function(){return 100*this.millisecond()}),H(0,["SSSSSS",6],0,function(){return 1e3*this.millisecond()}),H(0,["SSSSSSS",7],0,function(){return 1e4*this.millisecond()}),H(0,["SSSSSSSS",8],0,function(){return 1e5*this.millisecond()}),H(0,["SSSSSSSSS",9],0,function(){return 1e6*this.millisecond()}),z("millisecond","ms"),N("S",Xc,Rc),N("SS",Xc,Sc),N("SSS",Xc,Tc);var Ld;for(Ld="SSSS";Ld.length<=9;Ld+="S")N(Ld,$c);for(Ld="S";Ld.length<=9;Ld+="S")Q(Ld,Wb);var Md=C("Milliseconds",!1);H("z",0,0,"zoneAbbr"),H("zz",0,0,"zoneName");var 
Nd=n.prototype;Nd.add=Ad,Nd.calendar=cb,Nd.clone=db,Nd.diff=ib,Nd.endOf=ub,Nd.format=mb,Nd.from=nb,Nd.fromNow=ob,Nd.to=pb,Nd.toNow=qb,Nd.get=F,Nd.invalidAt=Cb,Nd.isAfter=eb,Nd.isBefore=fb,Nd.isBetween=gb,Nd.isSame=hb,Nd.isValid=Ab,Nd.lang=Cd,Nd.locale=rb,Nd.localeData=sb,Nd.max=wd,Nd.min=vd,Nd.parsingFlags=Bb,Nd.set=F,Nd.startOf=tb,Nd.subtract=Bd,Nd.toArray=yb,Nd.toObject=zb,Nd.toDate=xb,Nd.toISOString=lb,Nd.toJSON=lb,Nd.toString=kb,Nd.unix=wb,Nd.valueOf=vb,Nd.year=td,Nd.isLeapYear=ia,Nd.weekYear=Fb,Nd.isoWeekYear=Gb,Nd.quarter=Nd.quarters=Jb,Nd.month=Y,Nd.daysInMonth=Z,Nd.week=Nd.weeks=na,Nd.isoWeek=Nd.isoWeeks=oa,Nd.weeksInYear=Ib,Nd.isoWeeksInYear=Hb,Nd.date=Dd,Nd.day=Nd.days=Pb,Nd.weekday=Qb,Nd.isoWeekday=Rb,Nd.dayOfYear=qa,Nd.hour=Nd.hours=Id,Nd.minute=Nd.minutes=Jd,Nd.second=Nd.seconds=Kd, -Nd.millisecond=Nd.milliseconds=Md,Nd.utcOffset=Na,Nd.utc=Pa,Nd.local=Qa,Nd.parseZone=Ra,Nd.hasAlignedHourOffset=Sa,Nd.isDST=Ta,Nd.isDSTShifted=Ua,Nd.isLocal=Va,Nd.isUtcOffset=Wa,Nd.isUtc=Xa,Nd.isUTC=Xa,Nd.zoneAbbr=Xb,Nd.zoneName=Yb,Nd.dates=aa("dates accessor is deprecated. Use date instead.",Dd),Nd.months=aa("months accessor is deprecated. Use month instead",Y),Nd.years=aa("years accessor is deprecated. Use year instead",td),Nd.zone=aa("moment().zone is deprecated, use moment().utcOffset instead. https://github.com/moment/moment/issues/1779",Oa);var Od=Nd,Pd={sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},Qd={LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY h:mm A",LLLL:"dddd, MMMM D, YYYY h:mm A"},Rd="Invalid date",Sd="%d",Td=/\d{1,2}/,Ud={future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},Vd=s.prototype;Vd._calendar=Pd,Vd.calendar=_b,Vd._longDateFormat=Qd,Vd.longDateFormat=ac,Vd._invalidDate=Rd,Vd.invalidDate=bc,Vd._ordinal=Sd,Vd.ordinal=cc,Vd._ordinalParse=Td,Vd.preparse=dc,Vd.postformat=dc,Vd._relativeTime=Ud,Vd.relativeTime=ec,Vd.pastFuture=fc,Vd.set=gc,Vd.months=U,Vd._months=md,Vd.monthsShort=V,Vd._monthsShort=nd,Vd.monthsParse=W,Vd.week=ka,Vd._week=ud,Vd.firstDayOfYear=ma,Vd.firstDayOfWeek=la,Vd.weekdays=Lb,Vd._weekdays=Ed,Vd.weekdaysMin=Nb,Vd._weekdaysMin=Gd,Vd.weekdaysShort=Mb,Vd._weekdaysShort=Fd,Vd.weekdaysParse=Ob,Vd.isPM=Ub,Vd._meridiemParse=Hd,Vd.meridiem=Vb,w("en",{ordinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(a){var b=a%10,c=1===q(a%100/10)?"th":1===b?"st":2===b?"nd":3===b?"rd":"th";return a+c}}),a.lang=aa("moment.lang is deprecated. Use moment.locale instead.",w),a.langData=aa("moment.langData is deprecated. Use moment.localeData instead.",y);var Wd=Math.abs,Xd=yc("ms"),Yd=yc("s"),Zd=yc("m"),$d=yc("h"),_d=yc("d"),ae=yc("w"),be=yc("M"),ce=yc("y"),de=Ac("milliseconds"),ee=Ac("seconds"),fe=Ac("minutes"),ge=Ac("hours"),he=Ac("days"),ie=Ac("months"),je=Ac("years"),ke=Math.round,le={s:45,m:45,h:22,d:26,M:11},me=Math.abs,ne=Ha.prototype;ne.abs=oc,ne.add=qc,ne.subtract=rc,ne.as=wc,ne.asMilliseconds=Xd,ne.asSeconds=Yd,ne.asMinutes=Zd,ne.asHours=$d,ne.asDays=_d,ne.asWeeks=ae,ne.asMonths=be,ne.asYears=ce,ne.valueOf=xc,ne._bubble=tc,ne.get=zc,ne.milliseconds=de,ne.seconds=ee,ne.minutes=fe,ne.hours=ge,ne.days=he,ne.weeks=Bc,ne.months=ie,ne.years=je,ne.humanize=Fc,ne.toISOString=Gc,ne.toString=Gc,ne.toJSON=Gc,ne.locale=rb,ne.localeData=sb,ne.toIsoString=aa("toIsoString() is deprecated. 
Please use toISOString() instead (notice the capitals)",Gc),ne.lang=Cd,H("X",0,0,"unix"),H("x",0,0,"valueOf"),N("x",_c),N("X",bd),Q("X",function(a,b,c){c._d=new Date(1e3*parseFloat(a,10))}),Q("x",function(a,b,c){c._d=new Date(q(a))}),a.version="2.10.6",b(Da),a.fn=Od,a.min=Fa,a.max=Ga,a.utc=h,a.unix=Zb,a.months=jc,a.isDate=d,a.locale=w,a.invalid=l,a.duration=Ya,a.isMoment=o,a.weekdays=lc,a.parseZone=$b,a.localeData=y,a.isDuration=Ia,a.monthsShort=kc,a.weekdaysMin=nc,a.defineLocale=x,a.weekdaysShort=mc,a.normalizeUnits=A,a.relativeTimeThreshold=Ec;var oe=a;return oe}); \ No newline at end of file +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):e.moment=t()}(this,function(){"use strict";var H;function _(){return H.apply(null,arguments)}function y(e){return e instanceof Array||"[object Array]"===Object.prototype.toString.call(e)}function F(e){return null!=e&&"[object Object]"===Object.prototype.toString.call(e)}function c(e,t){return Object.prototype.hasOwnProperty.call(e,t)}function L(e){if(Object.getOwnPropertyNames)return 0===Object.getOwnPropertyNames(e).length;for(var t in e)if(c(e,t))return;return 1}function g(e){return void 0===e}function w(e){return"number"==typeof e||"[object Number]"===Object.prototype.toString.call(e)}function V(e){return e instanceof Date||"[object Date]"===Object.prototype.toString.call(e)}function G(e,t){for(var n=[],s=e.length,i=0;i>>0,s=0;sWe(e)?(r=e+1,t-We(e)):(r=e,t);return{year:r,dayOfYear:n}}function Be(e,t,n){var s,i,r=qe(e.year(),t,n),r=Math.floor((e.dayOfYear()-r-1)/7)+1;return r<1?s=r+N(i=e.year()-1,t,n):r>N(e.year(),t,n)?(s=r-N(e.year(),t,n),i=e.year()+1):(i=e.year(),s=r),{week:s,year:i}}function N(e,t,n){var s=qe(e,t,n),t=qe(e+1,t,n);return(We(e)-s+t)/7}s("w",["ww",2],"wo","week"),s("W",["WW",2],"Wo","isoWeek"),h("w",n,u),h("ww",n,t),h("W",n,u),h("WW",n,t),Oe(["w","ww","W","WW"],function(e,t,n,s){t[s.substr(0,1)]=M(e)});function Je(e,t){return e.slice(t,7).concat(e.slice(0,t))}s("d",0,"do","day"),s("dd",0,0,function(e){return this.localeData().weekdaysMin(this,e)}),s("ddd",0,0,function(e){return this.localeData().weekdaysShort(this,e)}),s("dddd",0,0,function(e){return this.localeData().weekdays(this,e)}),s("e",0,0,"weekday"),s("E",0,0,"isoWeekday"),h("d",n),h("e",n),h("E",n),h("dd",function(e,t){return t.weekdaysMinRegex(e)}),h("ddd",function(e,t){return t.weekdaysShortRegex(e)}),h("dddd",function(e,t){return t.weekdaysRegex(e)}),Oe(["dd","ddd","dddd"],function(e,t,n,s){s=n._locale.weekdaysParse(e,s,n._strict);null!=s?t.d=s:p(n).invalidWeekday=e}),Oe(["d","e","E"],function(e,t,n,s){t[s]=M(e)});var Qe="Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),Xe="Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),Ke="Su_Mo_Tu_We_Th_Fr_Sa".split("_"),et=i,tt=i,nt=i;function st(){function e(e,t){return t.length-e.length}for(var t,n,s,i=[],r=[],a=[],o=[],u=0;u<7;u++)s=l([2e3,1]).day(u),t=f(this.weekdaysMin(s,"")),n=f(this.weekdaysShort(s,"")),s=f(this.weekdays(s,"")),i.push(t),r.push(n),a.push(s),o.push(t),o.push(n),o.push(s);i.sort(e),r.sort(e),a.sort(e),o.sort(e),this._weekdaysRegex=new RegExp("^("+o.join("|")+")","i"),this._weekdaysShortRegex=this._weekdaysRegex,this._weekdaysMinRegex=this._weekdaysRegex,this._weekdaysStrictRegex=new RegExp("^("+a.join("|")+")","i"),this._weekdaysShortStrictRegex=new RegExp("^("+r.join("|")+")","i"),this._weekdaysMinStrictRegex=new RegExp("^("+i.join("|")+")","i")}function it(){return this.hours()%12||12}function 
rt(e,t){s(e,0,0,function(){return this.localeData().meridiem(this.hours(),this.minutes(),t)})}function at(e,t){return t._meridiemParse}s("H",["HH",2],0,"hour"),s("h",["hh",2],0,it),s("k",["kk",2],0,function(){return this.hours()||24}),s("hmm",0,0,function(){return""+it.apply(this)+r(this.minutes(),2)}),s("hmmss",0,0,function(){return""+it.apply(this)+r(this.minutes(),2)+r(this.seconds(),2)}),s("Hmm",0,0,function(){return""+this.hours()+r(this.minutes(),2)}),s("Hmmss",0,0,function(){return""+this.hours()+r(this.minutes(),2)+r(this.seconds(),2)}),rt("a",!0),rt("A",!1),h("a",at),h("A",at),h("H",n,d),h("h",n,u),h("k",n,u),h("HH",n,t),h("hh",n,t),h("kk",n,t),h("hmm",me),h("hmmss",_e),h("Hmm",me),h("Hmmss",_e),v(["H","HH"],O),v(["k","kk"],function(e,t,n){e=M(e);t[O]=24===e?0:e}),v(["a","A"],function(e,t,n){n._isPm=n._locale.isPM(e),n._meridiem=e}),v(["h","hh"],function(e,t,n){t[O]=M(e),p(n).bigHour=!0}),v("hmm",function(e,t,n){var s=e.length-2;t[O]=M(e.substr(0,s)),t[b]=M(e.substr(s)),p(n).bigHour=!0}),v("hmmss",function(e,t,n){var s=e.length-4,i=e.length-2;t[O]=M(e.substr(0,s)),t[b]=M(e.substr(s,2)),t[T]=M(e.substr(i)),p(n).bigHour=!0}),v("Hmm",function(e,t,n){var s=e.length-2;t[O]=M(e.substr(0,s)),t[b]=M(e.substr(s))}),v("Hmmss",function(e,t,n){var s=e.length-4,i=e.length-2;t[O]=M(e.substr(0,s)),t[b]=M(e.substr(s,2)),t[T]=M(e.substr(i))});i=Re("Hours",!0);var ot,ut={calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},longDateFormat:{LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY h:mm A",LLLL:"dddd, MMMM D, YYYY h:mm A"},invalidDate:"Invalid date",ordinal:"%d",dayOfMonthOrdinalParse:/\d{1,2}/,relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",ss:"%d seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",w:"a week",ww:"%d weeks",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},months:Fe,monthsShort:Le,week:{dow:0,doy:6},weekdays:Qe,weekdaysMin:Ke,weekdaysShort:Xe,meridiemParse:/[ap]\.?m?\.?/i},W={},lt={};function dt(e){return e&&e.toLowerCase().replace("_","-")}function ht(e){for(var t,n,s,i,r=0;r=t&&function(e,t){for(var n=Math.min(e.length,t.length),s=0;s=t-1)break;t--}r++}return ot}function ct(t){var e,n;if(void 0===W[t]&&"undefined"!=typeof module&&module&&module.exports&&(n=t)&&n.match("^[^/\\\\]*$"))try{e=ot._abbr,require("./locale/"+t),ft(e)}catch(e){W[t]=null}return W[t]}function ft(e,t){return e&&((t=g(t)?P(e):mt(e,t))?ot=t:"undefined"!=typeof console&&console.warn&&console.warn("Locale "+e+" not found. Did you forget to load it?")),ot._abbr}function mt(e,t){if(null===t)return delete W[e],null;var n,s=ut;if(t.abbr=e,null!=W[e])Q("defineLocaleOverride","use moment.updateLocale(localeName, config) to change an existing locale. 
moment.defineLocale(localeName, config) should only be used for creating a new locale See http://momentjs.com/guides/#/warnings/define-locale/ for more info."),s=W[e]._config;else if(null!=t.parentLocale)if(null!=W[t.parentLocale])s=W[t.parentLocale]._config;else{if(null==(n=ct(t.parentLocale)))return lt[t.parentLocale]||(lt[t.parentLocale]=[]),lt[t.parentLocale].push({name:e,config:t}),null;s=n._config}return W[e]=new K(X(s,t)),lt[e]&<[e].forEach(function(e){mt(e.name,e.config)}),ft(e),W[e]}function P(e){var t;if(!(e=e&&e._locale&&e._locale._abbr?e._locale._abbr:e))return ot;if(!y(e)){if(t=ct(e))return t;e=[e]}return ht(e)}function _t(e){var t=e._a;return t&&-2===p(e).overflow&&(t=t[Y]<0||11He(t[D],t[Y])?S:t[O]<0||24N(r,u,l)?p(s)._overflowWeeks=!0:null!=d?p(s)._overflowWeekday=!0:(h=$e(r,a,o,u,l),s._a[D]=h.year,s._dayOfYear=h.dayOfYear)),null!=e._dayOfYear&&(i=bt(e._a[D],n[D]),(e._dayOfYear>We(i)||0===e._dayOfYear)&&(p(e)._overflowDayOfYear=!0),d=ze(i,0,e._dayOfYear),e._a[Y]=d.getUTCMonth(),e._a[S]=d.getUTCDate()),t=0;t<3&&null==e._a[t];++t)e._a[t]=c[t]=n[t];for(;t<7;t++)e._a[t]=c[t]=null==e._a[t]?2===t?1:0:e._a[t];24===e._a[O]&&0===e._a[b]&&0===e._a[T]&&0===e._a[Te]&&(e._nextDay=!0,e._a[O]=0),e._d=(e._useUTC?ze:Ze).apply(null,c),r=e._useUTC?e._d.getUTCDay():e._d.getDay(),null!=e._tzm&&e._d.setUTCMinutes(e._d.getUTCMinutes()-e._tzm),e._nextDay&&(e._a[O]=24),e._w&&void 0!==e._w.d&&e._w.d!==r&&(p(e).weekdayMismatch=!0)}}function xt(e){if(e._f===_.ISO_8601)Yt(e);else if(e._f===_.RFC_2822)Ot(e);else{e._a=[],p(e).empty=!0;for(var t,n,s,i,r,a=""+e._i,o=a.length,u=0,l=ae(e._f,e._locale).match(te)||[],d=l.length,h=0;he.valueOf():e.valueOf()"}),u.toJSON=function(){return this.isValid()?this.toISOString():null},u.toString=function(){return this.clone().locale("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")},u.unix=function(){return Math.floor(this.valueOf()/1e3)},u.valueOf=function(){return this._d.valueOf()-6e4*(this._offset||0)},u.creationData=function(){return{input:this._i,format:this._f,locale:this._locale,isUTC:this._isUTC,strict:this._strict}},u.eraName=function(){for(var e,t=this.localeData().eras(),n=0,s=t.length;nthis.clone().month(0).utcOffset()||this.utcOffset()>this.clone().month(5).utcOffset()},u.isLocal=function(){return!!this.isValid()&&!this._isUTC},u.isUtcOffset=function(){return!!this.isValid()&&this._isUTC},u.isUtc=At,u.isUTC=At,u.zoneAbbr=function(){return this._isUTC?"UTC":""},u.zoneName=function(){return this._isUTC?"Coordinated Universal Time":""},u.dates=e("dates accessor is deprecated. Use date instead.",ge),u.months=e("months accessor is deprecated. Use month instead",Ie),u.years=e("years accessor is deprecated. Use year instead",Pe),u.zone=e("moment().zone is deprecated, use moment().utcOffset instead. http://momentjs.com/guides/#/warnings/zone/",function(e,t){return null!=e?(this.utcOffset(e="string"!=typeof e?-e:e,t),this):-this.utcOffset()}),u.isDSTShifted=e("isDSTShifted is deprecated. 
See http://momentjs.com/guides/#/warnings/dst-shifted/ for more information",function(){var e,t;return g(this._isDSTShifted)&&(q(e={},this),(e=Nt(e))._a?(t=(e._isUTC?l:R)(e._a),this._isDSTShifted=this.isValid()&&0 0) { + grCols[c].items.push({ + xtype: 'button', + text: _t('Show more results...'), + handler: function(t) { + t.hide(); + // will show the next page by looking at this.lastShown + me.updateGraphs() + } + }) + } this.add(grCols); } }, this); diff --git a/Products/ZenUI3/browser/resources/js/zenoss/itinfrastructure.js b/Products/ZenUI3/browser/resources/js/zenoss/itinfrastructure.js index 7d59182e2f..2fe10ba619 100644 --- a/Products/ZenUI3/browser/resources/js/zenoss/itinfrastructure.js +++ b/Products/ZenUI3/browser/resources/js/zenoss/itinfrastructure.js @@ -378,7 +378,7 @@ Ext.onReady(function () { permission: 'Delete Device', handler: function () { var selnode = getSelectionModel().getSelectedNode(), - isclass = Zenoss.types.type(selnode.data.uid) === 'DeviceClass', + isclass = selnode.data.uid.includes('Device'), grpText = selnode.data.text.text; var win = new Zenoss.FormDialog({ title: _t('Remove Devices'), @@ -1683,6 +1683,7 @@ Ext.onReady(function () { handler: function () { var grid = Ext.getCmp('device_grid'); if (grid.isVisible(true)) { + grid.getStore().reload(); grid.refresh(); Ext.getCmp('organizer_events').refresh(); refreshTreePanel(); diff --git a/Products/ZenUI3/browser/search/__init__.py b/Products/ZenUI3/browser/search/__init__.py index bb3b7f9189..3354e795cb 100644 --- a/Products/ZenUI3/browser/search/__init__.py +++ b/Products/ZenUI3/browser/search/__init__.py @@ -1,16 +1,16 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - from Products.Zuul import getFacade from Products.ZenUI3.browser.javascript import JavaScriptSrcViewlet + class IncludeSearchBox(JavaScriptSrcViewlet): """ Checks for the existence of search providers. If there are none, @@ -20,7 +20,7 @@ class IncludeSearchBox(JavaScriptSrcViewlet): path = "/++resource++search/zenoss-search.js" def render(self): - if not getFacade('search').noProvidersPresent(): - return super(IncludeSearchBox,self).render() + if not getFacade("search").noProvidersPresent(): + return super(IncludeSearchBox, self).render() else: - return '' + return "" diff --git a/Products/ZenUI3/browser/stats/__init__.py b/Products/ZenUI3/browser/stats/__init__.py index d67ac747c0..d14c439e9f 100644 --- a/Products/ZenUI3/browser/stats/__init__.py +++ b/Products/ZenUI3/browser/stats/__init__.py @@ -1,11 +1,8 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2012, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - - - diff --git a/Products/ZenUI3/browser/stats/stats.py b/Products/ZenUI3/browser/stats/stats.py index 10e3b723ec..48ffee7597 100644 --- a/Products/ZenUI3/browser/stats/stats.py +++ b/Products/ZenUI3/browser/stats/stats.py @@ -1,37 +1,43 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2012, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -from Products.Five.browser import BrowserView import collections import json -import time import logging -import subprocess -import sys import os import re -from zope import interface -from zope import component -from ZServer.PubCore.ZRendezvous import ZRendevous +import subprocess +import sys +import time + import ZPublisher.interfaces +from Products.Five.browser import BrowserView +from zope import component, interface +from ZServer.PubCore.ZRendezvous import ZRendevous + from Products.Zuul import interfaces from Products.ZenModel.DataRoot import DataRoot from Products.ZenUtils.cstat import CStat -_LOG = logging.getLogger('zen.stats') +from Products.ZenUtils.GlobalConfig import getGlobalConfiguration +from Products.ZenUtils.requestlogging.ZopeRequestLogger import ( + ZopeRequestLogger, +) + +_LOG = logging.getLogger("zen.stats") -from Products.ZenUtils.requestlogging.ZopeRequestLogger import ZopeRequestLogger _request_logger = ZopeRequestLogger() -from Products.ZenUtils.GlobalConfig import getGlobalConfiguration -_request_threshold = float(getGlobalConfiguration().get('zope-request-threshold', 5.0)) +_request_threshold = float( + getGlobalConfiguration().get("zope-request-threshold", 5.0) +) + # hook in to Web Server's Request Events so that # fine grained monitoring can be done @@ -39,6 +45,7 @@ def logRequestStart(event): event.request._start = time.time() + @component.adapter(ZPublisher.interfaces.IPubAfterTraversal) def logRequestStartAfterTraversal(event): # When IPubAfterTraversal is triggered the request body is available @@ -48,6 +55,7 @@ def logRequestStartAfterTraversal(event): except Exception: pass + @component.adapter(ZPublisher.interfaces.IPubEnd) def logRequestEnd(event): global _REQUEST_TOTAL, _REQUEST_COUNT, _REQUEST_TIME @@ -68,75 +76,88 @@ def logRequestEnd(event): except Exception: pass -_STATS_PERIOD = 60 * 15 # keep in-memory stats for 15 minutes -_REQUEST_TOTAL = 0 # running total of http requests -_REQUEST_COUNT = CStat(_STATS_PERIOD) # CStat of request count + +_STATS_PERIOD = 60 * 15 # keep in-memory stats for 15 minutes +_REQUEST_TOTAL = 0 # running total of http requests +_REQUEST_COUNT = CStat(_STATS_PERIOD) # CStat of request count _REQUEST_TIME = CStat(_STATS_PERIOD) # Cstat of request service times _SLOW_REQUEST_TOTAL = 0 _SLOW_REQUEST_COUNT = CStat(_STATS_PERIOD) _SLOW_REQUEST_TIME = CStat(_STATS_PERIOD) _BYTES_MAP = { - 'b': 1, - 'k': 1024, - 'kb': 1024, - 'm': 1024 * 1024, - 'mb': 1024 * 1024, - 'g': 1024 * 1024 * 1024, - 'gb': 1024 * 1024 * 1024, + "b": 1, + "k": 1024, + "kb": 1024, + "m": 1024 * 1024, + "mb": 1024 * 1024, + "g": 1024 * 1024 * 1024, + "gb": 1024 * 1024 * 1024, } + +@interface.implementer(interfaces.ISystemMetric) class _ZodbMetrics(object): """ Base class for reporting ZODB metrics. 
""" + component.adapts(DataRoot) - interface.implements(interfaces.ISystemMetric) db = None - + def __init__(self, context): self.context = context self._db = None - + def metrics(self): metrics = {} end = time.time() - start = end - 60 - db = self.context.unrestrictedTraverse('/Control_Panel/Database/%s' % self.db) + start = end - 60 + db = self.context.unrestrictedTraverse( + "/Control_Panel/Database/%s" % self.db + ) args = { - 'chart_start': start, - 'chart_end':end, + "chart_start": start, + "chart_end": end, } activityChart = db.getActivityChartData(200, args) - metrics['totalLoadCount'] = activityChart['total_load_count'] - metrics['totalStoreCount'] = activityChart['total_store_count'] - metrics['totalConnections'] = activityChart['total_connections'] - metrics['cacheLength'] = db.cache_length() - metrics['cacheSize'] = db.cache_size() + metrics["totalLoadCount"] = activityChart["total_load_count"] + metrics["totalStoreCount"] = activityChart["total_store_count"] + metrics["totalConnections"] = activityChart["total_connections"] + metrics["cacheLength"] = db.cache_length() + metrics["cacheSize"] = db.cache_size() # convert dbsize from string to bytes dbSize = db.db_size() - match = re.search("(?P[0-9]*\.?[0-9]*)(?P[^0-9]+)", dbSize) - metrics['databaseSize'] = int(float(match.group(1)) * _BYTES_MAP[match.group(2).lower()]) + match = re.search( + r"(?P[0-9]*\.?[0-9]*)(?P[^0-9]+)", dbSize + ) + metrics["databaseSize"] = int( + float(match.group(1)) * _BYTES_MAP[match.group(2).lower()] + ) return metrics - + + class MainZodbMetrics(_ZodbMetrics): db = "main" category = "ZODB_main" + class TempZodbMetrics(_ZodbMetrics): db = "temporary" category = "ZODB_temp" + +@interface.implementer(interfaces.ISystemMetric) class ZopeMetrics(object): """ ZopeMetrics reports metric related to the Zope server. 
""" + component.adapts(DataRoot) - interface.implements(interfaces.ISystemMetric) - + def __init__(self, context): self.context = context @@ -146,25 +167,29 @@ def metrics(self): metrics = {} # get total threads - metrics['totalThreads'] = len(sys._current_frames().keys()) + metrics["totalThreads"] = len(sys._current_frames().keys()) # get free threads freeThreads = 0 for frame in sys._current_frames().values(): - _self = frame.f_locals.get('self') - if getattr(_self, '__module__', None) == ZRendevous.__module__: + _self = frame.f_locals.get("self") + if getattr(_self, "__module__", None) == ZRendevous.__module__: freeThreads += 1 - metrics['freeThreads'] = freeThreads + metrics["freeThreads"] = freeThreads try: - metrics['activeSessions'] = len(self.context.unrestrictedTraverse('/temp_folder/session_data')) + metrics["activeSessions"] = len( + self.context.unrestrictedTraverse("/temp_folder/session_data") + ) except Exception: - metrics['activeSessions'] = -1 - + metrics["activeSessions"] = -1 + global _REQUEST_TOTAL, _REQUEST_COUNT, _REQUEST_TIME metrics["requestTotal"] = _REQUEST_TOTAL metrics["request1m"] = max(_REQUEST_COUNT.query(60), 1) - metrics["requestTimeAvg1m"] = _REQUEST_TIME.query(60) / float(metrics["request1m"]) + metrics["requestTimeAvg1m"] = _REQUEST_TIME.query(60) / float( + metrics["request1m"] + ) # Number of requests exceeding zope-request-threshold global _SLOW_REQUEST_COUNT, _SLOW_REQUEST_TIME, _SLOW_REQUEST_TOTAL @@ -172,46 +197,55 @@ def metrics(self): metrics["slowRequest1m"] = _SLOW_REQUEST_COUNT.query(60) try: - metrics["slowRequestTimeAvg1m"] = _SLOW_REQUEST_TIME.query(60) / float(metrics["slowRequest1m"]) + metrics["slowRequestTimeAvg1m"] = _SLOW_REQUEST_TIME.query( + 60 + ) / float(metrics["slowRequest1m"]) except ZeroDivisionError: # No slow requests metrics["slowRequestTimeAvg1m"] = 0.0 # request1m is always 1 or greater - metrics["slowRequestPercent"] = (float(metrics["slowRequest1m"]) / metrics["request1m"]) * 100 + metrics["slowRequestPercent"] = ( + float(metrics["slowRequest1m"]) / metrics["request1m"] + ) * 100 for key, value in self._getVmStats(): metrics[key] = value - + return metrics - + def _getVmStats(self): """ _getVmStats() retrives memory usage for the current process """ try: vmLines = subprocess.check_output( - "cat /proc/%d/status | egrep ^Vm" % os.getpid(), - shell=True) + "cat /proc/%d/status | egrep ^Vm" % os.getpid(), shell=True + ) for line in vmLines.splitlines(): rawStat, rawValue, unit = (line.split() + ["B"])[0:3] - stat = rawStat.split(':')[0] + stat = rawStat.split(":")[0] value = int(float(rawValue) * _BYTES_MAP[unit.lower()]) yield stat, value except subprocess.CalledProcessError as ex: _LOG.warn("Could not get memory info for current process: %s" % ex) + class StatsView(BrowserView): """ Provide a window in to this Zenoss Instance's performance stats. 
""" - + def __call__(self): metrics = collections.defaultdict(lambda: {}) - for subscriber in component.subscribers((self.context.dmd,), interfaces.ISystemMetric): + for subscriber in component.subscribers( + (self.context.dmd,), interfaces.ISystemMetric + ): try: metrics[subscriber.category].update(subscriber.metrics()) except Exception as ex: - _LOG.warn("An error occurred gathering performance stats: %s" % ex) - + _LOG.warn( + "An error occurred gathering performance stats: %s" % ex + ) + self.request.response.write(json.dumps(metrics)) diff --git a/Products/ZenUI3/browser/streaming.py b/Products/ZenUI3/browser/streaming.py index dd128f1d04..472fc47057 100644 --- a/Products/ZenUI3/browser/streaming.py +++ b/Products/ZenUI3/browser/streaming.py @@ -7,19 +7,22 @@ # ############################################################################## +import cgi +import logging +import traceback from Products.Five.browser import BrowserView from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile + from Products.ZenUtils.Utils import is_browser_connection_open -import logging + log = logging.getLogger("zen.streaming") -import traceback -import cgi LINE = """
%(data)s
""" + class StreamClosed(Exception): """ The browser has closed the connection. @@ -28,7 +31,7 @@ class StreamClosed(Exception): class StreamingView(BrowserView): - tpl = ViewPageTemplateFile('streaming.pt') + tpl = ViewPageTemplateFile("streaming.pt") def __init__(self, context, request): super(StreamingView, self).__init__(context, request) @@ -37,9 +40,9 @@ def __init__(self, context, request): def __call__(self): # tells nginx that we want to stream this text - self._stream.setHeader('X-Accel-Buffering', 'no') - self._stream.setHeader('Cache-Control', 'no-transform') - header, footer = str(self.tpl()).split('*****CONTENT_TOKEN*****') + self._stream.setHeader("X-Accel-Buffering", "no") + self._stream.setHeader("Cache-Control", "no-transform") + header, footer = str(self.tpl()).split("*****CONTENT_TOKEN*****") self._stream.write(header) try: try: @@ -47,30 +50,32 @@ def __call__(self): except StreamClosed: return except Exception: - self.write('Exception while performing command:
<br />') - self.write('<pre>%s</pre>' % (traceback.format_exc())) + self.write("Exception while performing command: <br />") + self.write("<pre>%s</pre>
" % (traceback.format_exc())) finally: self._stream.write(footer) self._stream.flush() self.request.close() - def write(self, data=''): + def write(self, data=""): data = cgi.escape(data) log.info("streaming data %s", data) if not is_browser_connection_open(self.request): - raise StreamClosed('The browser has closed the connection.') + raise StreamClosed("The browser has closed the connection.") html = LINE % { - 'lineclass': self._lineno % 2 and 'odd' or 'even', - 'data': data + "lineclass": self._lineno % 2 and "odd" or "even", + "data": data, } self._stream.write(html) self._lineno += 1 # fill up the buffer so it is more stream-y self._stream.write(" " * 1024) + class TestStream(StreamingView): def stream(self): import time + for i in range(100): self.write(i) time.sleep(0.5) diff --git a/Products/ZenUI3/browser/testing.py b/Products/ZenUI3/browser/testing.py index 2c36bab3e9..d5af65bea5 100644 --- a/Products/ZenUI3/browser/testing.py +++ b/Products/ZenUI3/browser/testing.py @@ -6,43 +6,58 @@ # License.zenoss under the directory where your Zenoss product is installed. # ############################################################################## -import os + import logging -log = logging.getLogger('zen.UITests') +import os + from Products.Five.browser import BrowserView from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile + from Products.ZenUtils.Utils import zenPath -from Products.ZenUI3.browser.javascript import getAllZenPackResources + +from .javascript import getAllZenPackResources + +log = logging.getLogger("zen.UITests") class UserInterfaceTests(BrowserView): """ - Search through all the core javascript for tests files and send them back to the client - to be evaluated. + Search through all the core javascript for tests files and send them + back to the client to be evaluated. - Any javascript file that starts with a "test" is considered a test file + Any javascript file that starts with a "test" is considered a test file. 
""" + __call__ = ZopeTwoPageTemplateFile("templates/userinterfacetests.pt") def getTestFiles(self): testFiles = self.getAllCoreJSTestFiles() for resource in getAllZenPackResources(): - testFiles.extend(self.getTestFilesFromResource(resource['name'], resource['directory'])) + testFiles.extend( + self.getTestFilesFromResource( + resource["name"], resource["directory"] + ) + ) return testFiles def getTestFilesFromResource(self, resource, path): tests = [] - resourcePath = "++resource++%s%s" % (resource, path.split("resources")[1]) + resourcePath = "++resource++%s%s" % ( + resource, + path.split("resources")[1], + ) for root, dirs, files in os.walk(path): for f in files: - if f.lower().startswith('test') and f.lower().endswith('.js'): + if f.lower().startswith("test") and f.lower().endswith(".js"): testPath = os.path.join(root, f) tests.append(testPath.replace(path, resourcePath)) return tests def getAllCoreJSTestFiles(self): resource = "zenui" - path = zenPath('Products', 'ZenUI3', 'browser', 'resources', 'js', 'zenoss') + path = zenPath( + "Products", "ZenUI3", "browser", "resources", "js", "zenoss" + ) test = self.getTestFilesFromResource(resource, path) log.info("Got the following tests %s", test) return test diff --git a/Products/ZenUI3/browser/views.py b/Products/ZenUI3/browser/views.py index 16635c3e95..248515906f 100644 --- a/Products/ZenUI3/browser/views.py +++ b/Products/ZenUI3/browser/views.py @@ -8,28 +8,29 @@ ############################################################################## import os -from urllib import unquote + from cStringIO import StringIO +from urllib import unquote -from zope.interface import Interface -from zope.publisher.interfaces.browser import IDefaultBrowserLayer -from zope.component import getGlobalSiteManager from Products.Five.browser import BrowserView from Products.Five.browser.resource import DirectoryResourceFactory from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile +from zope.interface import Interface +from zope.publisher.interfaces.browser import IDefaultBrowserLayer +from zope.component import getGlobalSiteManager from Products import Zuul from Products.ZenUtils.guid.interfaces import IGUIDManager from Products.ZenUtils.Utils import zenPath -from Products.ZenModel.DataRoot import DataRoot class FileUpload(BrowserView): """ - Renders a file upload in an iframe and asks the context to handle the results + Renders a file upload in an iframe and asks the context to handle + the results. 
""" - template = ViewPageTemplateFile('./templates/formUpload.pt') + template = ViewPageTemplateFile("./templates/formUpload.pt") def __call__(self, *args, **kwargs): """ @@ -43,7 +44,8 @@ def __call__(self, *args, **kwargs): @property def isPostBack(self): - return self.request.get('submit') + return self.request.get("submit") + class Robots(BrowserView): """ @@ -55,7 +57,10 @@ def __call__(self, *args, **kwargs): Return the robots.txt in the resource dir """ import os.path - with open(os.path.dirname(__file__) +'/resources/txt/robots.txt') as f: + + with open( + os.path.dirname(__file__) + "/resources/txt/robots.txt" + ) as f: return f.read() @@ -73,7 +78,7 @@ def __call__(self, *args, **kwargs): request = self.request response = self.request.response obj = None - guid = request.get('guid', None) + guid = request.get("guid", None) if not guid: return response.write("The guid paramater is required") @@ -90,7 +95,8 @@ def __call__(self, *args, **kwargs): return response.write("Could not look up guid") path = obj.absolute_url_path() - return response.redirect(path + '?' + request.QUERY_STRING) + return response.redirect(path + "?" + request.QUERY_STRING) + class GetDaemonLogs(BrowserView): """ @@ -102,34 +108,38 @@ def __call__(self, *args, **kwargs): Takes the id and prints out logs if we can fetch them from the facade. """ - id = self.request.get('id', None) + id = self.request.get("id", None) response = self.request.response if not id: response.write("id parameter is missing") return - facade = Zuul.getFacade('applications', self.context) + facade = Zuul.getFacade("applications", self.context) try: log = facade.getLog(id) - self.request.response.setHeader('Content-Type', 'text/plain') + self.request.response.setHeader("Content-Type", "text/plain") response.write(log) except Exception as ex: - response.write( - "Unable to find service with id %s: %s" % (id, ex) - ) + response.write("Unable to find service with id %s: %s" % (id, ex)) + class GetDoc(BrowserView): def __call__(self, bundle): # check whitelist to make sure document requested is available serveFile = False for checkFile in self.context.dmd.getDocFilesInfo(): - if bundle == checkFile['filename']: + if bundle == checkFile["filename"]: serveFile = True break if serveFile: filename = os.path.join(zenPath("docs"), bundle) - self.request.RESPONSE.setHeader('Content-Type', 'application/x-gzip') - self.request.RESPONSE.setHeader('Content-Disposition', 'attachment;filename=' + os.path.basename(filename)) + self.request.RESPONSE.setHeader( + "Content-Type", "application/x-gzip" + ) + self.request.RESPONSE.setHeader( + "Content-Disposition", + "attachment;filename=" + os.path.basename(filename), + ) with open(filename) as f: return f.read() @@ -137,40 +147,47 @@ def __call__(self, bundle): class NginxStaticLocationBlocks(BrowserView): """ Builds syntactically correct nginx configuration that, if included in the - server block in zproxy's config, will allow it to load static files directly - from disk, taking Zope out of the equation. + server block in zproxy's config, will allow it to load static files + directly from disk, taking Zope out of the equation. - We accomplish this by looking up all registered - resources, then generating PageSpeed directives that will notice the URL - patterns that map to those views and skip directly to the directories they - represent. 
+ We accomplish this by looking up all registered + resources, then generating PageSpeed + directives that will notice the URL patterns that map to those views and + skip directly to the directories they represent. This has the added of benefit of not needing to disable PageSpeed when in development mode. It will notice changes to the files and reload them appropriately. """ - loc_block_tpl = """ + loc_block_tpl = r""" pagespeed LoadFromFileMatch "^https?://[^/]+/\+\+resource\+\+{name}/" "{path}/"; pagespeed LoadFromFileMatch "^https?://[^/]+/[^/]+/@@/{name}/" "{path}/"; - """ + """ # noqa E501 def __call__(self, *args, **kwargs): blocks = StringIO() # Find all registered DirectoryResourceFactories gsm = getGlobalSiteManager() - adapters = [(n, a) - for n, a in gsm.adapters.lookupAll((IDefaultBrowserLayer,), Interface) - if isinstance(a, DirectoryResourceFactory)] + adapters = [ + (n, a) + for n, a in gsm.adapters.lookupAll( + (IDefaultBrowserLayer,), Interface + ) + if isinstance(a, DirectoryResourceFactory) + ] # Build location blocks for name, drf in adapters: - blocks.write(self.loc_block_tpl.format(name=name, path=drf._ResourceFactory__rsrc.path)) + blocks.write( + self.loc_block_tpl.format( + name=name, path=drf._ResourceFactory__rsrc.path + ) + ) # This is technically unnecessary for the current use case, but hey - self.request.response.setHeader('Content-Type', 'application/json') + self.request.response.setHeader("Content-Type", "application/json") # Return that there data return blocks.getvalue() - diff --git a/Products/ZenUI3/locales/javascript.py b/Products/ZenUI3/locales/javascript.py index ba3be51059..07356fd2c3 100644 --- a/Products/ZenUI3/locales/javascript.py +++ b/Products/ZenUI3/locales/javascript.py @@ -1,20 +1,20 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - +from Products.Five.browser import BrowserView from zope.component import queryUtility from zope.i18n.interfaces import ITranslationDomain from zope.i18n.negotiator import negotiator -from Products.Five.browser import BrowserView from Products.ZenUtils.jsonutils import json + def getDomainMessages(name, request): result = {} domain = queryUtility(ITranslationDomain, name) @@ -26,7 +26,7 @@ def getDomainMessages(name, request): cat = domain._data[path] data = cat._catalog._catalog # Strip out empty key - result = dict((k,v) for k,v in data.iteritems() if k) + result = dict((k, v) for k, v in data.iteritems() if k) return result @@ -34,10 +34,10 @@ class I18N(BrowserView): def __call__(self): tpl = "Zenoss.i18n._data = %s;" # Get messages for general keys - msgs = getDomainMessages('zenoss', self.request) + msgs = getDomainMessages("zenoss", self.request) # Add messages for the domain - dname = self.request.get('domain') + dname = self.request.get("domain") if dname: msgs.update(getDomainMessages(dname, self.request)) - self.request.response.setHeader('Content-Type', 'text/javascript') + self.request.response.setHeader("Content-Type", "text/javascript") return tpl % json(msgs) diff --git a/Products/ZenUI3/navigation/__init__.py b/Products/ZenUI3/navigation/__init__.py index b3f70e9d06..25bf2d9cb6 100644 --- a/Products/ZenUI3/navigation/__init__.py +++ b/Products/ZenUI3/navigation/__init__.py @@ -1,14 +1,14 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## +from .manager import SecondaryNavigationManager -from manager import SecondaryNavigationManager def getSelectedNames(view): mgr = SecondaryNavigationManager(view.context, view.request, view) @@ -19,5 +19,5 @@ def getSelectedNames(view): secondary = v.__name__ break else: - secondary = '' + secondary = "" return primary, secondary diff --git a/Products/ZenUI3/navigation/interfaces.py b/Products/ZenUI3/navigation/interfaces.py index 6d91ae9346..ce457815a2 100644 --- a/Products/ZenUI3/navigation/interfaces.py +++ b/Products/ZenUI3/navigation/interfaces.py @@ -1,32 +1,39 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## +from zope.publisher.interfaces.browser import ( + IBrowserSkinType, + IDefaultBrowserLayer, +) from zope.viewlet.interfaces import IViewletManager, IViewlet -from zope.publisher.interfaces.browser import IBrowserSkinType, IDefaultBrowserLayer + class IPrimaryNavigationMenu(IViewletManager): """ Navigation menu viewlet manager. """ + class ISecondaryNavigationMenu(IViewletManager): """ Navigation menu viewlet manager. """ + class INavigationItem(IViewlet): """ A navigable item. 
""" -class IZenossNav(IBrowserSkinType,IDefaultBrowserLayer): + +class IZenossNav(IBrowserSkinType, IDefaultBrowserLayer): """ Marker interface for our nav layer """ diff --git a/Products/ZenUI3/navigation/manager.py b/Products/ZenUI3/navigation/manager.py index a499c45150..7bd5ac2e70 100644 --- a/Products/ZenUI3/navigation/manager.py +++ b/Products/ZenUI3/navigation/manager.py @@ -1,28 +1,26 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -import zope.interface import zope.component -from zope.viewlet.interfaces import IViewletManager, IViewlet -from zope.contentprovider.interfaces import BeforeUpdateEvent +import zope.interface from Products.Five.viewlet.manager import ViewletManagerBase from interfaces import IPrimaryNavigationMenu, ISecondaryNavigationMenu -def viewletSortKey((name, viewlet)): +def viewletSortKey(args): """ Creates a sort key for this viewlet. Primary sort is viewlet weight, and secondary is viewlet name, guaranteeing the same order on each call. """ + name, viewlet = args try: return (float(viewlet.weight), name) except (AttributeError, ValueError): @@ -36,29 +34,31 @@ def sort(self, viewlets): return sorted(viewlets, key=viewletSortKey) - class PrimaryNavigationManager(WeightOrderedViewletManager): zope.interface.implements(IPrimaryNavigationMenu) +@zope.interface.implementer(ISecondaryNavigationMenu) class SecondaryNavigationManager(WeightOrderedViewletManager): """ A secondary level of navigation. Knows how to look up the parent item to see if it is selected. """ - zope.interface.implements(ISecondaryNavigationMenu) def getViewletsByParentName(self, name): - if not hasattr(self, 'viewlets'): self.update() - return [v for v in self.viewlets if v.parentItem==name] + if not hasattr(self, "viewlets"): + self.update() + return [v for v in self.viewlets if v.parentItem == name] def getActivePrimaryName(self): - primary = PrimaryNavigationManager(self.context, self.request, - self.__parent__) + primary = PrimaryNavigationManager( + self.context, self.request, self.__parent__ + ) primary.update() for v in primary.viewlets: - if v.selected: return v.__name__ + if v.selected: + return v.__name__ return None def getActiveViewlets(self): @@ -69,4 +69,4 @@ def getActiveViewlets(self): return viewlets def render(self): - return '\n'.join(v.render() for v in self.getActiveViewlets()) + return "\n".join(v.render() for v in self.getActiveViewlets()) diff --git a/Products/ZenUI3/navigation/menuitem.py b/Products/ZenUI3/navigation/menuitem.py index 72f48d632f..c167302477 100644 --- a/Products/ZenUI3/navigation/menuitem.py +++ b/Products/ZenUI3/navigation/menuitem.py @@ -1,40 +1,43 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## +import re from itertools import chain -import re + import zope.interface -from zope.browserpage.viewpagetemplatefile import ViewPageTemplateFile + from Products.Five.viewlet import viewlet +from zope.browserpage.viewpagetemplatefile import ViewPageTemplateFile -from interfaces import INavigationItem -from manager import SecondaryNavigationManager +from .interfaces import INavigationItem +from .manager import SecondaryNavigationManager + +@zope.interface.implementer(INavigationItem) class PrimaryNavigationMenuItem(viewlet.ViewletBase): - zope.interface.implements(INavigationItem) - template = ViewPageTemplateFile('nav_item.pt') + template = ViewPageTemplateFile("nav_item.pt") - url = '' - target = '_self' - active_class = 'active' - inactive_class = 'inactive' + url = "" + target = "_self" + active_class = "active" + inactive_class = "inactive" subviews = () @property def title(self): return self.__name__ - + @property def elementid(self): - return "{0}-nav-button".format(self.__name__).replace(' ', '-') + return "{0}-nav-button".format(self.__name__).replace(" ", "-") def update(self): super(PrimaryNavigationMenuItem, self).update() @@ -43,12 +46,13 @@ def update(self): @property def selected(self): - requestURL = self.request.getURL().replace('/@@', '/') + requestURL = self.request.getURL().replace("/@@", "/") for url in chain((self.url,), self.subviews): - if re.search(url, requestURL) : + if re.search(url, requestURL): return True - sec = SecondaryNavigationManager(self.context, self.request, - self.__parent__) + sec = SecondaryNavigationManager( + self.context, self.request, self.__parent__ + ) if sec: for v in sec.getViewletsByParentName(self.__name__): if v.selected: @@ -84,18 +88,18 @@ def render(self): return self.template() # user does not have permission to view the menu item globally - return '' + return "" +@zope.interface.implementer(INavigationItem) class SecondaryNavigationMenuItem(PrimaryNavigationMenuItem): - zope.interface.implements(INavigationItem) parentItem = "" @property def selected(self): - requestURL = self.request.getURL().replace('/@@', '/') + requestURL = self.request.getURL().replace("/@@", "/") for url in chain((self.url,), self.subviews): - if re.search(url, requestURL) : + if re.search(url, requestURL): return True return False diff --git a/Products/ZenUI3/navigation/tests.py b/Products/ZenUI3/navigation/tests.py index 2aca058fb6..9a91c81875 100644 --- a/Products/ZenUI3/navigation/tests.py +++ b/Products/ZenUI3/navigation/tests.py @@ -1,40 +1,40 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - import unittest -from zope.testing import doctestunit -from zope.component import testing from Testing import ZopeTestCase as ztc +from zope.component import testing -def test_suite(): - return unittest.TestSuite([ - - # Unit tests for your API - #doctestunit.DocFileSuite( - # 'README.txt', package='Products.ZenUI3.navigation', - # setUp=testing.setUp, tearDown=testing.tearDown), - - #doctestunit.DocTestSuite( - # module='Products.ZenUI3.mymodule', - # setUp=testing.setUp, tearDown=testing.tearDown), - - #Integration tests that use ZopeTestCase - ztc.ZopeDocFileSuite( - 'README.txt', package='Products.ZenUI3.navigation', - setUp=testing.setUp, tearDown=testing.tearDown), - - #ztc.FunctionalDocFileSuite( - # 'browser.txt', package='Products.ZenUI3'), - - ]) -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') +def test_suite(): + return unittest.TestSuite( + [ + # Unit tests for your API + # doctestunit.DocFileSuite( + # 'README.txt', package='Products.ZenUI3.navigation', + # setUp=testing.setUp, tearDown=testing.tearDown), + # doctestunit.DocTestSuite( + # module='Products.ZenUI3.mymodule', + # setUp=testing.setUp, tearDown=testing.tearDown), + # Integration tests that use ZopeTestCase + ztc.ZopeDocFileSuite( + "README.txt", + package="Products.ZenUI3.navigation", + setUp=testing.setUp, + tearDown=testing.tearDown, + ), + # ztc.FunctionalDocFileSuite( + # 'browser.txt', package='Products.ZenUI3'), + ] + ) + + +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/Products/ZenUI3/security/authorization.py b/Products/ZenUI3/security/authorization.py index 8987c6d9ad..2aaf1a643d 100644 --- a/Products/ZenUI3/security/authorization.py +++ b/Products/ZenUI3/security/authorization.py @@ -9,18 +9,20 @@ import json import transaction -from uuid import uuid1 + from Products.Five.browser import BrowserView + from Products.Zuul.interfaces import IAuthorizationTool -ZAUTH_HEADER_ID = 'X-ZAuth-Token' +ZAUTH_HEADER_ID = "X-ZAuth-Token" class Authorization(BrowserView): """ - This view acts as a namespace so the client requests are /authorization/login and - /authorization/validate + This view acts as a namespace so the client requests are + /authorization/login and /authorization/validate. """ + def __getitem__(self, index): if index == "login": return Login(self.context, self.request) @@ -33,43 +35,49 @@ class Login(BrowserView): """ Validates the credentials supplied and creates a new authorization token. """ + def __call__(self, *args, **kwargs): """ - Extract login/password credentials, test authentication, and create a token + Extract login/password credentials, test authentication, + and create a token. 
""" # test for uuid if self.uuid is None: self.request.response.setStatus(503) - self.request.response.write( "System uninitialized - please execute setup wizard") + self.request.response.write( + "System uninitialized - please execute setup wizard" + ) transaction.abort() return - authorization = IAuthorizationTool( self.context.context) + authorization = IAuthorizationTool(self.context.context) credentials = authorization.extractCredentials(self.request) - login = credentials.get('login', None) - password = credentials.get('password', None) + login = credentials.get("login", None) + password = credentials.get("password", None) # no credentials to test authentication if login is None or password is None: self.request.response.setStatus(401) - self.request.response.write( "Missing Authentication Credentials") + self.request.response.write("Missing Authentication Credentials") transaction.abort() return # test authentication if not authorization.authenticateCredentials(login, password): self.request.response.setStatus(401) - self.request.response.write( "Failed Authentication") + self.request.response.write("Failed Authentication") transaction.abort() return # create the session data token = authorization.createAuthToken(self.request) - self.request.response.setHeader( 'X-ZAuth-TokenId', token['id']) - self.request.response.setHeader( 'X-ZAuth-TokenExpiration', token['expires']) - self.request.response.setHeader( 'X-ZAuth-TenantId', self.uuid) + self.request.response.setHeader("X-ZAuth-TokenId", token["id"]) + self.request.response.setHeader( + "X-ZAuth-TokenExpiration", token["expires"] + ) + self.request.response.setHeader("X-ZAuth-TenantId", self.uuid) return json.dumps(token) @property @@ -80,6 +88,7 @@ def dmd(self): def uuid(self): return self.dmd.uuid + class Validate(BrowserView): """ Assert token id exists in session data and token id hasn't expired @@ -87,37 +96,42 @@ class Validate(BrowserView): def __call__(self, *args, **kwargs): """ - extract token id, test token expiration, and return token + extract token id, test token expiration, and return token """ # test for uuid if self.uuid is None: self.request.response.setStatus(503) - self.request.response.write( "System uninitialized - please execute setup wizard") + self.request.response.write( + "System uninitialized - please execute setup wizard" + ) return - tokenId = self.request.get('id', None) + tokenId = self.request.get("id", None) if tokenId is None: tokenId = self.request.getHeader(ZAUTH_HEADER_ID) # missing token id if tokenId is None: self.request.response.setStatus(401) - self.request.response.write( "Missing Token Id") + self.request.response.write("Missing Token Id") return - authorization = IAuthorizationTool( self.context.context) + authorization = IAuthorizationTool(self.context.context) - #grab token to handle edge case, when expiration happens after expiration test + # Grab token to handle edge case, when expiration happens after + # expiration test. 
tokenId = tokenId.strip('"') token = authorization.getToken(tokenId) if authorization.tokenExpired(tokenId): self.request.response.setStatus(401) - self.request.response.write( "Token Expired") + self.request.response.write("Token Expired") return - self.request.response.setHeader( 'X-ZAuth-TokenId', token['id']) - self.request.response.setHeader( 'X-ZAuth-TokenExpiration', token['expires']) - self.request.response.setHeader( 'X-ZAuth-TenantId', self.uuid) + self.request.response.setHeader("X-ZAuth-TokenId", token["id"]) + self.request.response.setHeader( + "X-ZAuth-TokenExpiration", token["expires"] + ) + self.request.response.setHeader("X-ZAuth-TenantId", self.uuid) return json.dumps(token) @property diff --git a/Products/ZenUI3/security/interfaces.py b/Products/ZenUI3/security/interfaces.py index 7e955e6c26..a08b21440d 100644 --- a/Products/ZenUI3/security/interfaces.py +++ b/Products/ZenUI3/security/interfaces.py @@ -1,13 +1,12 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - from zope.viewlet.interfaces import IViewletManager, IViewlet @@ -16,6 +15,7 @@ class ISecurityManager(IViewletManager): The Viewlet manager for the security declaratives """ + class IPermissionsDeclarationViewlet(IViewlet): """ Will return to the client side all of our security declaritives diff --git a/Products/ZenUI3/security/security.py b/Products/ZenUI3/security/security.py index c6101ad542..78245dd556 100644 --- a/Products/ZenUI3/security/security.py +++ b/Products/ZenUI3/security/security.py @@ -7,23 +7,23 @@ # ############################################################################## - -from zope import interface, component +from AccessControl import getSecurityManager +from collective.beaker.interfaces import ISession +from Products.Five.viewlet import viewlet from Products.Five.viewlet.manager import ViewletManagerBase +from zope import interface + from Products.ZenUtils.jsonutils import json -from Products.Five.viewlet import viewlet -from interfaces import ISecurityManager, IPermissionsDeclarationViewlet -from AccessControl import getSecurityManager -from Products.ZenUtils.guid.interfaces import IGlobalIdentifier from Products.Zuul.interfaces import IAuthorizationTool -from collective.beaker.interfaces import ISession -ZAUTH_COOKIE = 'ZAuthToken' +from .interfaces import ISecurityManager, IPermissionsDeclarationViewlet + +ZAUTH_COOKIE = "ZAuthToken" + +@interface.implementer(ISecurityManager) class SecurityManager(ViewletManagerBase): - """The Viewlet manager class for the permissions declaration - """ - interface.implements(ISecurityManager) + """The Viewlet manager class for the permissions declaration""" def permissionsForContext(context): @@ -35,8 +35,11 @@ def permissionsForContext(context): all_permissions = context.zport.acl_users.possible_permissions() # filter out the ones we have in this context - valid_permissions = [permission for permission in all_permissions - if manager.checkPermission(permission, context)] + valid_permissions = [ + permission + for permission in all_permissions + if manager.checkPermission(permission, context) + ] # turn the list into a dictionary to make it easier to look up on # the client side (just look up the key instead of iterating) @@ 
-45,11 +48,12 @@ def permissionsForContext(context): perms[permission.lower()] = True return perms + +@interface.implementer(IPermissionsDeclarationViewlet) class PermissionsDeclaration(viewlet.ViewletBase): """This is responsible for sending to the client side which permissions the user has """ - interface.implements(IPermissionsDeclarationViewlet) def render(self): """Creates a global function in JavaScript that returns the @@ -59,7 +63,9 @@ def render(self): """ self._setAuthorizationCookie() permissions = self.permissionsForCurrentContext() - managedObjectGuids = self.getManagedObjectGuids(returnChildrenForRootObj=True) + managedObjectGuids = self.getManagedObjectGuids( + returnChildrenForRootObj=True + ) data = json(permissions) func = """ - """ % (data, json(managedObjectGuids), str(self.hasGlobalRoles()).lower()) + """ % ( + data, + json(managedObjectGuids), + str(self.hasGlobalRoles()).lower(), + ) return func def _setAuthorizationCookie(self): @@ -83,7 +93,13 @@ def _setAuthorizationCookie(self): authorization = IAuthorizationTool(self.context) token = authorization.createAuthToken(self.request) - self.request.response.setCookie(ZAUTH_COOKIE, token['id'], path="/", secure=session.secure, http_only=True) + self.request.response.setCookie( + ZAUTH_COOKIE, + token["id"], + path="/", + secure=session.secure, + http_only=True, + ) def hasGlobalRoles(self): """ @@ -106,5 +122,7 @@ def getManagedObjectGuids(self, returnChildrenForRootObj=False): guids = [] us = self.context.dmd.ZenUsers.getUserSettings() if us.hasNoGlobalRoles(): - guids = us.getAllAdminGuids(returnChildrenForRootObj=returnChildrenForRootObj) + guids = us.getAllAdminGuids( + returnChildrenForRootObj=returnChildrenForRootObj + ) return guids diff --git a/Products/ZenUI3/tests.py b/Products/ZenUI3/tests.py index 703537713e..163590a37d 100644 --- a/Products/ZenUI3/tests.py +++ b/Products/ZenUI3/tests.py @@ -1,40 +1,40 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - import unittest -from zope.testing import doctestunit from zope.component import testing -from Testing import ZopeTestCase as ztc - -def test_suite(): - return unittest.TestSuite([ - - # Unit tests for your API - doctestunit.DocFileSuite( - 'README.txt', package='Products.ZenUI3', - setUp=testing.setUp, tearDown=testing.tearDown), - - #doctestunit.DocTestSuite( - # module='Products.ZenUI3.mymodule', - # setUp=testing.setUp, tearDown=testing.tearDown), - - # Integration tests that use ZopeTestCase - #ztc.ZopeDocFileSuite( - # 'README.txt', package='Products.ZenUI3', - # setUp=testing.setUp, tearDown=testing.tearDown), - - #ztc.FunctionalDocFileSuite( - # 'browser.txt', package='Products.ZenUI3'), +from zope.testing import doctestunit - ]) -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') +def test_suite(): + return unittest.TestSuite( + [ + # Unit tests for your API + doctestunit.DocFileSuite( + "README.txt", + package="Products.ZenUI3", + setUp=testing.setUp, + tearDown=testing.tearDown, + ), + # doctestunit.DocTestSuite( + # module='Products.ZenUI3.mymodule', + # setUp=testing.setUp, tearDown=testing.tearDown), + # Integration tests that use ZopeTestCase + # ztc.ZopeDocFileSuite( + # 'README.txt', package='Products.ZenUI3', + # setUp=testing.setUp, tearDown=testing.tearDown), + # ztc.FunctionalDocFileSuite( + # 'browser.txt', package='Products.ZenUI3'), + ] + ) + + +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/Products/ZenUI3/tooltips/interfaces.py b/Products/ZenUI3/tooltips/interfaces.py index f106cf6ef8..1f3ecb7de0 100644 --- a/Products/ZenUI3/tooltips/interfaces.py +++ b/Products/ZenUI3/tooltips/interfaces.py @@ -10,18 +10,21 @@ from zope.interface import Interface + class ITooltipProvider(Interface): """ - A marker interface for utilites that want to provide new/overridden pagehelp - and tooltop XML to supplement that found in Products/ZenUI3/data + A marker interface for utilites that want to provide new/overridden + pagehelp and tooltop XML to supplement that found in Products/ZenUI3/data. """ def path(self): - """ Return an absolute directory path of pagehelp + tooltip XML. - The expected file/directory layout is identical to Products/ZenUI3/data. + """Return an absolute directory path of pagehelp + tooltip XML. + The expected file/directory layout is identical to + Products/ZenUI3/data. 
+ For example, a valid returned path of - '/opt/zenoss/ZenPacks/ZenPacks.zenoss.Example/tooltips' might have this - structure: + '/opt/zenoss/ZenPacks/ZenPacks.zenoss.Example/tooltips' might have + this structure: tooltips |---en diff --git a/Products/ZenUI3/tooltips/tooltips.py b/Products/ZenUI3/tooltips/tooltips.py index 8eb9db577e..9fae59e360 100644 --- a/Products/ZenUI3/tooltips/tooltips.py +++ b/Products/ZenUI3/tooltips/tooltips.py @@ -7,49 +7,72 @@ # ############################################################################## - -import re -import urllib -import os.path import logging +import os.path +import re + from xml.dom import minidom -from zope.i18n.negotiator import negotiator -from zope import component + from Products.Five.browser import BrowserView +from zope import component +from zope.i18n.negotiator import negotiator from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile -from Products.ZenUtils.jsonutils import json + from Products.ZenUI3.navigation import getSelectedNames from Products.ZenUI3.tooltips.interfaces import ITooltipProvider +from Products.ZenUtils.jsonutils import json -log = logging.getLogger('zen.tooltips') +log = logging.getLogger("zen.tooltips") -_datapath = os.path.join(os.path.dirname(__file__), 'data') -_valpat = re.compile(r'<[^<>]+>(.*)]+>', re.M|re.S) +_datapath = os.path.join(os.path.dirname(__file__), "data") +_valpat = re.compile(r"<[^<>]+>(.*)]+>", re.M | re.S) _tipattrs = { - 'showDelay':float, 'hideDelay':float, 'dismissDelay':float, - 'trackMouse':bool, 'anchorToTarget':bool, 'anchorOffset':int, - 'minWidth':int, 'maxWidth':int, 'shadow':str, 'defaultAlign':str, - 'autoRender':bool, 'quickShowInterval':int, 'frame':bool, 'hidden':bool, - 'baseCls':str, 'autoHeight':bool, 'closeAction':str, 'title':str, - 'html':str, 'target':str, 'closable':bool, 'anchor':str, 'autoHide':bool + "showDelay": float, + "hideDelay": float, + "dismissDelay": float, + "trackMouse": bool, + "anchorToTarget": bool, + "anchorOffset": int, + "minWidth": int, + "maxWidth": int, + "shadow": str, + "defaultAlign": str, + "autoRender": bool, + "quickShowInterval": int, + "frame": bool, + "hidden": bool, + "baseCls": str, + "autoHeight": bool, + "closeAction": str, + "title": str, + "html": str, + "target": str, + "closable": bool, + "anchor": str, + "autoHide": bool, } + def catalog_required(fn): - """ A decorator to be used around _TooltipCatalog functions that require + """A decorator to be used around _TooltipCatalog functions that require its _catalog to be built. A glorified way of lazy-loading the catalog to make sure zcml is fully loaded beforehand. """ + def inner(*args, **kwargs): if not args[0]._reloaded: args[0].reload() return fn(*args, **kwargs) + return inner + class _TooltipCatalog(object): """ Store the data pulled in from XML. This is a singleton and should not be instantiated directly. """ + _catalog = None def __init__(self): @@ -60,54 +83,55 @@ def add(self, lang, view, tip): self._catalog.setdefault(view, {}).setdefault(lang, []).append(tip) def _add_navhelp(self, lang, target, title, tip): - d = { - 'title':title, - 'tip':tip - } - self._catalog.setdefault('nav-help', {}).setdefault(lang, {})[target] = d + d = {"title": title, "tip": tip} + self._catalog.setdefault("nav-help", {}).setdefault(lang, {})[ + target + ] = d def reload(self): """ Read in tooltips from XML files. 
""" + def _load_tips(doc, lang, view): - for tip in doc.getElementsByTagName('tooltip'): + for tip in doc.getElementsByTagName("tooltip"): d = {} for node in tip.childNodes: - if isinstance(node, minidom.Text): continue + if isinstance(node, minidom.Text): + continue result = _valpat.search(node.toxml()) if result: value = result.groups()[0].strip() name = node.tagName - if name in _tipattrs and _tipattrs[name]!=str: + if name in _tipattrs and _tipattrs[name] != str: value = eval(value) if isinstance(value, basestring): - value = value.replace('%26', '&') + value = value.replace("%26", "&") d[name] = value - if 'autoHide' in d: - d['closable'] = not d['autoHide'] + if "autoHide" in d: + d["closable"] = not d["autoHide"] self.add(lang, view, d) def _load_navhelp(doc, lang): - for tip in doc.getElementsByTagName('pagehelp'): + for tip in doc.getElementsByTagName("pagehelp"): result = _valpat.search(tip.toxml()) - target = tip.getAttribute('target') - title = tip.getAttribute('title') + target = tip.getAttribute("target") + title = tip.getAttribute("title") if result and target: value = result.groups()[0].strip() self._add_navhelp(lang, target, title, value) def _load_files(_none, path, fnames): - lang = path.rsplit('/', 1)[-1] + lang = path.rsplit("/", 1)[-1] for f in fnames: - if not f.endswith('.xml'): + if not f.endswith(".xml"): continue view = f[:-4] fd = open(os.path.join(path, f)) data = fd.read() fd.close() - doc = minidom.parseString(data.replace('&', '%26')) - if f.startswith('nav-help'): + doc = minidom.parseString(data.replace("&", "%26")) + if f.startswith("nav-help"): _load_navhelp(doc, lang) else: _load_tips(doc, lang, view) @@ -117,9 +141,10 @@ def _load_files(_none, path, fnames): # Now load up other tooltips. The last one to override wins. # Processed by name in alphabetical order - for name, klass in \ - sorted(component.getUtilitiesFor(ITooltipProvider), key=lambda tup: tup[0]): - log.debug('Loading tooltip provider %s (%s)', name, klass) + for name, klass in sorted( + component.getUtilitiesFor(ITooltipProvider), key=lambda tup: tup[0] + ): + log.debug("Loading tooltip provider %s (%s)", name, klass) os.path.walk(klass().path(), _load_files, None) self._reloaded = True @@ -130,14 +155,15 @@ def tips(self, view, lang="en"): """ return self._catalog.get(view, {}).get(lang, [])[:] - @catalog_required def pagehelp(self, navitem, lang="en"): """ Look up the page-level help for a given screen and language. """ - return self._catalog.get('nav-help', {}).get(lang, {}).get(navitem, - None) + return ( + self._catalog.get("nav-help", {}).get(lang, {}).get(navitem, None) + ) + @catalog_required def langs(self, view): """ @@ -145,33 +171,39 @@ def langs(self, view): """ return self._catalog.get(view, {}).keys() + TooltipCatalog = _TooltipCatalog() + class Tooltips(BrowserView): def __call__(self): results = [] - viewname = self.request['HTTP_REFERER'].rsplit('/', 1)[-1] + viewname = self.request["HTTP_REFERER"].rsplit("/", 1)[-1] # incase there are query parameters in the url if "?" 
in viewname: viewname = viewname.split("?")[0] - lang = negotiator.getLanguage(TooltipCatalog.langs(viewname), - self.request) + lang = negotiator.getLanguage( + TooltipCatalog.langs(viewname), self.request + ) tips = TooltipCatalog.tips(viewname, lang) tpl = "Zenoss.registerTooltip(%s);" for tip in tips: results.append(tpl % json(tip)) - # Bypass caching because all tooltips for all pages use the same file tooltips.js - self.request.response.setHeader('Pragma', 'no-cache') - self.request.response.setHeader('Cache-Control', 'no-cache') - self.request.response.setHeader('Content-Type', 'text/javascript') - return "Ext.onReady(function(){%s})" % '\n'.join(results) + # Bypass caching because all tooltips for all pages use the same + # file tooltips.js + self.request.response.setHeader("Pragma", "no-cache") + self.request.response.setHeader("Cache-Control", "no-cache") + self.request.response.setHeader("Content-Type", "text/javascript") + return "Ext.onReady(function(){%s})" % "\n".join(results) class PageLevelHelp(BrowserView): - __call__ = ViewPageTemplateFile('pagehelp.pt') + __call__ = ViewPageTemplateFile("pagehelp.pt") + def __init__(self, context, request): super(PageLevelHelp, self).__init__(context, request) primary, secondary = getSelectedNames(self) - lang = negotiator.getLanguage(TooltipCatalog.langs('nav-help'), - self.request) + lang = negotiator.getLanguage( + TooltipCatalog.langs("nav-help"), self.request + ) self.tip = TooltipCatalog.pagehelp(primary, lang) diff --git a/Products/ZenUI3/utils/interfaces.py b/Products/ZenUI3/utils/interfaces.py index 346a7393d1..9d6dc1e668 100644 --- a/Products/ZenUI3/utils/interfaces.py +++ b/Products/ZenUI3/utils/interfaces.py @@ -1,15 +1,15 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - from zope.viewlet.interfaces import IViewletManager, IViewlet + class IJavaScriptSnippetManager(IViewletManager): """ Simple way to get data from Zope to JavaScript layer. Viewlets deliver up @@ -17,11 +17,13 @@ class IJavaScriptSnippetManager(IViewletManager): template. """ + class IJavaScriptSnippet(IViewlet): """ Holds raw JavaScript to be delivered to the template by a RawJavaScriptManager. Subclass and override the L{snippet} method. """ + def snippet(): """ Returns a string containing raw javascript to be written to the page. diff --git a/Products/ZenUI3/utils/javascript.py b/Products/ZenUI3/utils/javascript.py index 23ae5e5174..0dd8b3b546 100644 --- a/Products/ZenUI3/utils/javascript.py +++ b/Products/ZenUI3/utils/javascript.py @@ -1,18 +1,17 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - -import zope.interface -from Products.Five.viewlet.viewlet import ViewletBase from Products.Five.viewlet.manager import ViewletManagerBase +from Products.Five.viewlet.viewlet import ViewletBase +from zope.interface import implementer -from interfaces import IJavaScriptSnippetManager, IJavaScriptSnippet +from .interfaces import IJavaScriptSnippetManager, IJavaScriptSnippet SCRIPT_TAG_TEMPLATE = """ """ -class JavaScriptSnippetManager(ViewletManagerBase): - - zope.interface.implements(IJavaScriptSnippetManager) +@implementer(IJavaScriptSnippetManager) +class JavaScriptSnippetManager(ViewletManagerBase): def render(self): - raw_js = '\n'.join(v.render() for v in self.viewlets) + raw_js = "\n".join(v.render() for v in self.viewlets) return SCRIPT_TAG_TEMPLATE % raw_js +@implementer(IJavaScriptSnippet) class JavaScriptSnippet(ViewletBase): - - zope.interface.implements(IJavaScriptSnippet) - def snippet(self): - raise NotImplementedError("Subclasses must implement their own " - "snippet method.") + raise NotImplementedError( + "Subclasses must implement their own " "snippet method." + ) + def render(self): return self.snippet() diff --git a/Products/ZenUI3/utils/timeutils.py b/Products/ZenUI3/utils/timeutils.py index ecdc6b253e..d503339be4 100644 --- a/Products/ZenUI3/utils/timeutils.py +++ b/Products/ZenUI3/utils/timeutils.py @@ -1,17 +1,18 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -import time import math +import time + from decimal import Decimal + def readable_time(seconds, precision=1): """ Convert some number of seconds into a human-readable string. 
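# ---------------------------------------------------------------------------
# Editor's note (illustration only, not part of the patch): a minimal,
# self-contained sketch of the interface-declaration refactor applied
# throughout this changeset (e.g. in javascript.py and security.py above) --
# the class-body call zope.interface.implements(...) is replaced by the
# @zope.interface.implementer(...) class decorator. The interface and class
# names below are hypothetical and exist only for this sketch.
from zope.interface import Interface, implementer


class IExampleSnippet(Interface):
    """Hypothetical marker interface used only for this sketch."""


@implementer(IExampleSnippet)
class ExampleSnippet(object):
    # Old style removed by the patch (a call made inside the class body):
    #     zope.interface.implements(IExampleSnippet)
    def render(self):
        return ""
# ---------------------------------------------------------------------------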
@@ -41,23 +42,31 @@ def readable_time(seconds, precision=1): """ if seconds is None: - return '0 seconds' + return "0 seconds" remaining = abs(seconds) if remaining < 1: - return '0 seconds' + return "0 seconds" - names = ('year', 'month', 'week', 'day', 'hour', 'minute', 'second') - mults = (60*60*24*365, 60*60*24*30, 60*60*24*7, 60*60*24, 60*60, 60, 1) + names = ("year", "month", "week", "day", "hour", "minute", "second") + mults = ( + 60 * 60 * 24 * 365, + 60 * 60 * 24 * 30, + 60 * 60 * 24 * 7, + 60 * 60 * 24, + 60 * 60, + 60, + 1, + ) result = [] for name, div in zip(names, mults): - num = Decimal(str(math.floor(remaining/div))) - remaining -= int(num)*div + num = Decimal(str(math.floor(remaining / div))) + remaining -= int(num) * div num = int(num) if num: - result.append('%d %s%s' %(num, name, num>1 and 's' or '')) - if len(result)==precision: + result.append("%d %s%s" % (num, name, num > 1 and "s" or "")) + if len(result) == precision: break - return ' '.join(result) + return " ".join(result) def relative_time(t, precision=1, cmptime=None): @@ -91,7 +100,7 @@ def relative_time(t, precision=1, cmptime=None): seconds = Decimal(str(t - cmptime)) result = readable_time(seconds, precision) if seconds < 0: - result += ' ago' + result += " ago" else: - result = 'in ' + result + result = "in " + result return result diff --git a/Products/ZenUI3/zope2.py b/Products/ZenUI3/zope2.py index 28156bff77..157b6d1e3f 100644 --- a/Products/ZenUI3/zope2.py +++ b/Products/ZenUI3/zope2.py @@ -1,10 +1,10 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## diff --git a/Products/ZenUtils/CheckRelations.py b/Products/ZenUtils/CheckRelations.py index e7e7991f1e..98f6d95445 100644 --- a/Products/ZenUtils/CheckRelations.py +++ b/Products/ZenUtils/CheckRelations.py @@ -1,28 +1,19 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - -__doc__="""CmdBase - -Add data base access functions for command line programs - -$Id: CheckRelations.py,v 1.2 2004/10/19 22:28:59 edahl Exp $""" - -__version__ = "$Revision: 1.2 $"[11:-2] - +from __future__ import absolute_import import transaction - from Products.ZenUtils.Utils import getAllConfmonObjects +from Products.ZenUtils.ZenScriptBase import ZenScriptBase -from ZenScriptBase import ZenScriptBase class CheckRelations(ZenScriptBase): @@ -32,10 +23,13 @@ def rebuild(self): self.log.info("Checking relations...") for object in getAllConfmonObjects(self.dmd): ccount += 1 - self.log.debug("checking relations on object %s", object.getPrimaryDmdId()) + self.log.debug( + "checking relations on object %s", object.getPrimaryDmdId() + ) object.checkRelations(repair=repair) ch = object._p_changed - if not ch: object._p_deactivate() + if not ch: + object._p_deactivate() if ccount >= self.options.savepoint: transaction.savepoint() ccount = 0 @@ -43,24 +37,34 @@ def rebuild(self): self.log.info("not commiting any changes") else: trans = transaction.get() - trans.note('CheckRelations cleaned relations' ) + trans.note("CheckRelations cleaned relations") trans.commit() - def buildOptions(self): - ZenScriptBase.buildOptions(self) - - self.parser.add_option('-r', '--repair', - dest='repair', action="store_true", - help='repair all inconsistant relations') - - self.parser.add_option('-x', '--savepoint', - dest='savepoint', default=500, type="int", - help='how many lines should be loaded before savepoint') - - self.parser.add_option('-n', '--nocommit', - dest='nocommit', action="store_true", - help='Do not store changes to the Dmd (for debugging)') + super(CheckRelations, self).buildOptions() + + self.parser.add_option( + "-r", + "--repair", + dest="repair", + action="store_true", + help="repair all inconsistant relations", + ) + self.parser.add_option( + "-x", + "--savepoint", + dest="savepoint", + default=500, + type="int", + help="how many lines should be loaded before savepoint", + ) + self.parser.add_option( + "-n", + "--nocommit", + dest="nocommit", + action="store_true", + help="Do not store changes to the Dmd (for debugging)", + ) if __name__ == "__main__": diff --git a/Products/ZenUtils/CmdBase.py b/Products/ZenUtils/CmdBase.py index 04f0920566..e06181f35a 100644 --- a/Products/ZenUtils/CmdBase.py +++ b/Products/ZenUtils/CmdBase.py @@ -7,48 +7,61 @@ # ############################################################################## +from __future__ import absolute_import, print_function -__doc__="""CmdBase - -Provide utility functions for logging and config file parsing -to command-line programs -""" - -import os -import os.path -import sys import datetime import logging +import os +import os.path import re +import sys +import textwrap + from copy import copy +from optparse import ( + BadOptionError, + NO_DEFAULT, + Option, + OptionGroup, + OptionParser, + OptionValueError, + SUPPRESS_HELP, +) +from urllib import quote + import zope.component + from zope.traversing.adapters import DefaultTraversable from Zope2.App import zcml -from optparse import ( - OptionParser, OptionGroup, Option, - SUPPRESS_HELP, NO_DEFAULT, OptionValueError, BadOptionError, - ) -from urllib import quote +from .config import ConfigLoader +from .Utils import ( + getAllParserOptionsGen, + load_config_override, + unused, + zenPath, +) +from .GlobalConfig import ( + _convertConfigLinesToArguments, + getGlobalConfiguration, +) -from 
Products.ZenUtils.Utils import unused, load_config_override, zenPath, getAllParserOptionsGen -from Products.ZenUtils.GlobalConfig import _convertConfigLinesToArguments, applyGlobalConfToParser +# List of options to not include when generating a config file. +_OPTIONS_TO_IGNORE = ( + "", + "configfile", + "genconf", + "genxmlconfigs", + "genxmltable", + "help", + "version", +) -class DMDError: pass +class DMDError: + pass -def checkLogLevel(option, opt, value): - if re.match(r'^\d+$', value): - value = int(value) - else: - intval = getattr(logging, value.upper(), None) - if intval: - value = intval - else: - raise OptionValueError('"%s" is not a valid log level.' % value) - - return value def remove_args(argv, remove_args_novals, remove_args_vals): """ @@ -66,43 +79,63 @@ def remove_args(argv, remove_args_novals, remove_args_vals): for remove_arg in remove_args_vals: if remove_arg == arg: add_arg = False - it.next() # Skip the argument value + it.next() # Skip the argument value break - elif arg.startswith(remove_arg + '='): + elif arg.startswith(remove_arg + "="): add_arg = False break if add_arg: new_args.append(arg) return new_args -class LogSeverityOption(Option): + +def checkLogLevel(option, opt, value): + if re.match(r"^\d+$", value): + value = int(value) + else: + intval = getattr(logging, value.upper(), None) + if intval is None: + raise OptionValueError('"%s" is not a valid log level.' % value) + value = intval + + return value + + +class CmdBaseOption(Option): TYPES = Option.TYPES + ("loglevel",) TYPE_CHECKER = copy(Option.TYPE_CHECKER) TYPE_CHECKER["loglevel"] = checkLogLevel -class CmdBase(object): +LogSeverityOption = CmdBaseOption - doesLogging = True +class CmdBase(object): """ - Class used for all Zenoss commands + Base class used for most Zenoss commands. """ + + doesLogging = True + def __init__(self, noopts=0, args=None, should_log=None): zope.component.provideAdapter(DefaultTraversable, (None,)) # This explicitly loads all of the products - must happen first! from OFS.Application import import_products + import_products() - #make sure we aren't in debug mode + # make sure we aren't in debug mode import Globals + Globals.DevelopmentMode = False # We must import ZenossStartup at this point so that all Zenoss daemons # and tools will have any ZenPack monkey-patched methods available. import Products.ZenossStartup + unused(Products.ZenossStartup) zcml.load_site() import Products.ZenWidgets - load_config_override('scriptmessaging.zcml', Products.ZenWidgets) + + load_config_override("scriptmessaging.zcml", Products.ZenWidgets) self.usage = "%prog [options]" self.noopts = noopts @@ -118,41 +151,24 @@ def __init__(self, noopts=0, args=None, should_log=None): self.buildParser() self.buildOptions() - - # Get defaults from global.conf. They will be overridden by - # daemon-specific config file or command line arguments. 
- applyGlobalConfToParser(self.parser) + # Update the defaults from the config files + self.parser.defaults.update( + _get_defaults_from_config([] if self.noopts else self.inputArgs) + ) self.parseOptions() - if self.options.configfile: - self.parser.defaults = self.getConfigFileDefaults(self.options.configfile) - # We've updated the parser with defaults from configs, now we need - # to reparse our command-line to get the correct overrides from - # the command-line - self.parseOptions() if should_log is not None: self.doesLogging = should_log - + if self.doesLogging: self.setupLogging() - def buildParser(self): """ - Create the options parser + Create the options parser. """ if not self.parser: - from Products.ZenModel.ZenossInfo import ZenossInfo - try: - zinfo= ZenossInfo('') - version= str(zinfo.getZenossVersion()) - except Exception: - from Products.ZenModel.ZVersion import VERSION - version= VERSION - self.parser = OptionParser(usage=self.usage, - version="%prog " + version, - option_class=LogSeverityOption) - + self.parser = _build_parser() def buildOptions(self): """ @@ -163,117 +179,83 @@ def buildOptions(self): if self.doesLogging: group = OptionGroup(self.parser, "Logging Options") group.add_option( - '-v', '--logseverity', - dest='logseverity', default='INFO', type='loglevel', - help='Logging severity threshold', + "-v", + "--logseverity", + dest="logseverity", + default="INFO", + type="loglevel", + help="Logging severity threshold", ) group.add_option( - '--logpath', dest='logpath', default=zenPath('log'), type='str', - help='Override the default logging path; default $ZENHOME/log' + "--logpath", + dest="logpath", + default=zenPath("log"), + type="str", + help="Override the default logging path; default %default", ) group.add_option( - '--maxlogsize', - dest='maxLogKiloBytes', default=10240, type='int', - help='Max size of log file in KB; default 10240', + "--maxlogsize", + dest="maxLogKiloBytes", + default=10240, + type="int", + help="Max size of log file in KB; default %default", ) group.add_option( - '--maxbackuplogs', - dest='maxBackupLogs', default=3, type='int', - help='Max number of back up log files; default 3', + "--maxbackuplogs", + dest="maxBackupLogs", + default=3, + type="int", + help="Max number of back up log files; default %default", ) self.parser.add_option_group(group) - self.parser.add_option("-C", "--configfile", - dest="configfile", - help="Use an alternate configuration file" ) - - self.parser.add_option("--genconf", - action="store_true", - default=False, - help="Generate a template configuration file" ) + self.parser.add_option( + "-C", + "--configfile", + dest="configfile", + help="Use an alternate configuration file", + ) - self.parser.add_option("--genxmltable", - action="store_true", - default=False, - help="Generate a Docbook table showing command-line switches." ) + self.parser.add_option( + "--genconf", + action="store_true", + default=False, + help="Generate a template configuration file", + ) - self.parser.add_option("--genxmlconfigs", - action="store_true", - default=False, - help="Generate an XML file containing command-line switches." 
) + self.parser.add_option( + "--genxmltable", + action="store_true", + default=False, + help="Generate a Docbook table showing command-line switches.", + ) + self.parser.add_option( + "--genxmlconfigs", + action="store_true", + default=False, + help="Generate an XML file containing command-line switches.", + ) def parseOptions(self): """ - Uses the optparse parse previously populated and performs common options. + Uses the optparse parse previously populated and performs common + options. """ - - if self.noopts: - args = [] - else: - args = self.inputArgs + args = [] if self.noopts else self.inputArgs (self.options, self.args) = self.parser.parse_args(args=args) if self.options.genconf: - self.generate_configs( self.parser, self.options ) + self.generate_configs(self.parser, self.options) if self.options.genxmltable: - self.generate_xml_table( self.parser, self.options ) + self.generate_xml_table(self.parser, self.options) if self.options.genxmlconfigs: - self.generate_xml_configs( self.parser, self.options ) - - - def getConfigFileDefaults(self, filename, correctErrors=True): - # TODO: This should be refactored - duplicated code with CmdBase. - """ - Parse a config file which has key-value pairs delimited by white space, - and update the parser's option defaults with these values. - - @parameter filename: name of configuration file - @type filename: string - """ - - options = self.parser.get_default_values() - lines = self.loadConfigFile(filename) - if lines: - lines, errors = self.validateConfigFile(filename, lines, - correctErrors=correctErrors) - - args = self.getParamatersFromConfig(lines) - try: - self.parser._process_args([], args, options) - except (BadOptionError, OptionValueError) as err: - print >>sys.stderr, 'WARN: %s in config file %s' % (err, filename) - - return options.__dict__ - - - def getGlobalConfigFileDefaults(self): - # Deprecated: This method is going away - it is duplicated in GlobalConfig.py - """ - Parse a config file which has key-value pairs delimited by white space, - and update the parser's option defaults with these values. - """ - - filename = zenPath('etc', 'global.conf') - options = self.parser.get_default_values() - lines = self.loadConfigFile(filename) - if lines: - args = self.getParamatersFromConfig(lines) - - try: - self.parser._process_args([], args, options) - except (BadOptionError, OptionValueError): - # Ignore it, we only care about our own options as defined in the parser - pass - - return options.__dict__ - + self.generate_xml_configs(self.parser, self.options) def loadConfigFile(self, filename): - # TODO: This should be refactored - duplicated code with CmdBase. """ Parse a config file which has key-value pairs delimited by white space. @@ -286,37 +268,61 @@ def loadConfigFile(self, filename): try: with open(filename) as file: for line in file: - if line.lstrip().startswith('#') or line.strip() == '': - lines.append(dict(type='comment', line=line)) + if line.lstrip().startswith("#") or line.strip() == "": + lines.append({"type": "comment", "line": line}) else: try: - # add default blank string for keys with no default value - # valid delimiters are space, ':' and/or '=' (see ZenUtils/config.py) - key, value = (re.split(r'[\s:=]+', line.strip(), 1) + ['',])[:2] + # Add default blank string for keys with no + # default value. 
+ # Valid delimiters are space, ':' and/or '=' + # (see ZenUtils/config.py) + key, value = ( + re.split(r"[\s:=]+", line.strip(), maxsplit=1) + + [""] + )[:2] except ValueError: - lines.append(dict(type='option', line=line, key=line.strip(), value=None, option=None)) + lines.append( + { + "type": "option", + "line": line, + "key": line.strip(), + "value": None, + "option": None, + } + ) else: - option = self.parser.get_option('--%s' % key) - lines.append(dict(type='option', line=line, key=key, value=value, option=option)) + option = self.parser.get_option("--%s" % key) + lines.append( + { + "type": "option", + "line": line, + "key": key, + "value": value, + "option": option, + } + ) except IOError as e: - errorMessage = ('WARN: unable to read config file {filename} ' - '-- skipping. ({exceptionName}: {exception})').format( + errorMessage = ( + "WARN: unable to read config file {filename} " + "-- skipping. ({exceptionName}: {exception})" + ).format( filename=filename, exceptionName=e.__class__.__name__, - exception=e + exception=e, ) - print >>sys.stderr, errorMessage + print(errorMessage, file=sys.stderr) return [] return lines - - def validateConfigFile(self, filename, lines, correctErrors=True, warnErrors=True): + def validateConfigFile( + self, filename, lines, correctErrors=True, warnErrors=True + ): """ - Validate config file lines which has key-value pairs delimited by white space, - and validate that the keys exist for this command's option parser. If - the option does not exist or has an empty value it will comment it out - in the config file. + Validate config file lines which has key-value pairs delimited by + white space, and validate that the keys exist for this command's + option parser. If the option does not exist or has an empty value it + will comment it out in the config file. @parameter filename: path to the configuration file @type filename: string @@ -326,88 +332,102 @@ def validateConfigFile(self, filename, lines, correctErrors=True, warnErrors=Tru commented out. 
@type correctErrors: boolean """ - output = [] errors = [] validLines = [] date = datetime.datetime.now().isoformat() - errorTemplate = '## Commenting out by config parser (%s) on %s: %%s\n' % ( - sys.argv[0], date) + errorTemplate = ( + "## Commenting out by config parser (%s) on %s: %%s\n" + % (sys.argv[0], date) + ) for lineno, line in enumerate(lines): - if line['type'] == 'comment': - output.append(line['line']) - elif line['type'] == 'option': - if line['value'] is None: - errors.append((lineno + 1, 'missing value for "%s"' % line['key'])) - output.append(errorTemplate % 'missing value') - output.append('## %s' % line['line']) - elif line['option'] is None: - errors.append((lineno + 1, 'unknown option "%s"' % line['key'])) - output.append(errorTemplate % 'unknown option') - output.append('## %s' % line['line']) + if line["type"] == "comment": + output.append(line["line"]) + elif line["type"] == "option": + if line["value"] is None: + errors.append( + (lineno + 1, 'missing value for "%s"' % line["key"]) + ) + output.append(errorTemplate % "missing value") + output.append("## %s" % line["line"]) + elif line["option"] is None: + errors.append( + (lineno + 1, 'unknown option "%s"' % line["key"]) + ) + output.append(errorTemplate % "unknown option") + output.append("## %s" % line["line"]) else: validLines.append(line) - output.append(line['line']) + output.append(line["line"]) else: - errors.append((lineno + 1, 'unknown line "%s"' % line['line'])) - output.append(errorTemplate % 'unknown line') - output.append('## %s' % line['line']) + errors.append((lineno + 1, 'unknown line "%s"' % line["line"])) + output.append(errorTemplate % "unknown line") + output.append("## %s" % line["line"]) if errors: if correctErrors: for lineno, message in errors: - print >>sys.stderr, 'INFO: Commenting out %s on line %d in %s' % (message, lineno, filename) + print( + "INFO: Commenting out %s on line %d in %s" + % (message, lineno, filename), + file=sys.stderr, + ) - with open(filename, 'w') as file: + with open(filename, "w") as file: file.writelines(output) if warnErrors: for lineno, message in errors: - print >>sys.stderr, 'WARN: %s on line %d in %s' % (message, lineno, filename) + print( + "WARN: %s on line %d in %s" + % (message, lineno, filename), + file=sys.stderr, + ) return validLines, errors - def getParamatersFromConfig(self, lines): # Deprecated: This method is going away return _convertConfigLinesToArguments(self.parser, lines) - def setupLogging(self): """ - Set common logging options + Set common logging options. """ rlog = logging.getLogger() rlog.setLevel(logging.WARN) mname = self.__class__.__name__ - self.log = logging.getLogger("zen."+ mname) + self.log = logging.getLogger("zen." 
+ mname) zlog = logging.getLogger("zen") try: loglevel = int(self.options.logseverity) except ValueError: - loglevel = getattr(logging, self.options.logseverity.upper(), logging.INFO) + loglevel = getattr( + logging, self.options.logseverity.upper(), logging.INFO + ) zlog.setLevel(loglevel) logdir = self.checkLogpath() if logdir: - logfile = os.path.join(logdir, mname.lower()+".log") + logfile = os.path.join(logdir, mname.lower() + ".log") maxBytes = self.options.maxLogKiloBytes * 1024 backupCount = self.options.maxBackupLogs - h = logging.handlers.RotatingFileHandler(logfile, maxBytes=maxBytes, - backupCount=backupCount) - h.setFormatter(logging.Formatter( - "%(asctime)s %(levelname)s %(name)s: %(message)s", - "%Y-%m-%d %H:%M:%S")) + h = logging.handlers.RotatingFileHandler( + logfile, maxBytes=maxBytes, backupCount=backupCount + ) + h.setFormatter( + logging.Formatter( + "%(asctime)s %(levelname)s %(name)s: %(message)s", + "%Y-%m-%d %H:%M:%S", + ) + ) rlog.addHandler(h) else: logging.basicConfig() - def checkLogpath(self): - """ - Validate the logpath is valid - """ + """Validate the logpath is valid.""" if not self.options.logpath: return None else: @@ -417,93 +437,60 @@ def checkLogpath(self): try: os.makedirs(logdir) except OSError: - raise SystemExit("logpath:%s doesn't exist and cannot be created" % logdir) + raise SystemExit( + "logpath:%s doesn't exist and cannot be created" + % logdir + ) elif not os.path.isdir(logdir): - raise SystemExit("logpath:%s exists but is not a directory" % logdir) + raise SystemExit( + "logpath:%s exists but is not a directory" % logdir + ) return logdir - - def pretty_print_config_comment( self, comment ): + def pretty_print_config_comment(self, comment): """ - Quick and dirty pretty printer for comments that happen to be longer than can comfortably -be seen on the display. + Quick and dirty pretty printer for comments that happen to be longer + than can comfortably be seen on the display. """ - - max_size= 40 - # - # As a heuristic we'll accept strings that are +- text_window - # size in length. - # - text_window= 5 - - if len( comment ) <= max_size + text_window: - return comment - - # - # First, take care of embedded newlines and expand them out to array entries - # - new_comment= [] - all_lines= comment.split( '\n' ) - for line in all_lines: - if len(line) <= max_size + text_window: - new_comment.append( line ) - continue - - start_position= max_size - text_window - while len(line) > max_size + text_window: - index= line.find( ' ', start_position ) - if index > 0: - new_comment.append( line[ 0:index ] ) - line= line[ index: ] - - else: - if start_position == 0: - # - # If we get here it means that the line is just one big string with no spaces - # in it. There's nothing that we can do except print it out. Doh! 
- # - new_comment.append( line ) - break - - # - # Okay, haven't found anything to split on -- go back and try again - # - start_position= start_position - text_window - if start_position < 0: - start_position= 0 - - else: - new_comment.append( line ) - - return "\n# ".join( new_comment ) - - - - def generate_configs( self, parser, options ): + new_comment = textwrap.wrap(comment, width=75) + return "# " + "\n# ".join(new_comment) + + def _get_default_value(self, parser, opt): + default_value = parser.defaults.get(opt.dest) + if default_value is NO_DEFAULT or default_value is None: + return "" + return str(default_value) + + def _get_help_text(self, opt, default_value): + if "%default" in opt.help: + return opt.help.replace("%default", default_value) + default_text = "" + if default_value != "": + default_text = " [default %s]" % (default_value,) + return opt.help + default_text + + def generate_configs(self, parser, options): """ - Create a configuration file based on the long-form of the option names + Create a configuration file based on the long-form of the option names. - @parameter parser: an optparse parser object which contains defaults, help - @parameter options: parsed options list containing actual values + :param parser: an optparse parser object which contains defaults, help + :param options: parsed options list containing actual values """ - # # Header for the configuration file # unused(options) - daemon_name= os.path.basename( sys.argv[0] ) - daemon_name= daemon_name.replace( '.py', '' ) + daemon_name = os.path.basename(sys.argv[0]) + daemon_name = daemon_name.replace(".py", "") - print """# + print( + """# # Configuration file for %s # # To enable a particular option, uncomment the desired entry. -# -# Parameter Setting -# --------- -------""" % ( daemon_name ) - - - options_to_ignore= ( 'help', 'version', '', 'genconf', 'genxmltable' ) +#""" + % (daemon_name,) + ) # # Create an entry for each of the command line flags @@ -511,75 +498,69 @@ def generate_configs( self, parser, options ): # NB: Ideally, this should print out only the option parser dest # entries, rather than the command line options. # - import re for opt in getAllParserOptionsGen(parser): - if opt.help is SUPPRESS_HELP: - continue - - # - # Get rid of the short version of the command - # - option_name= re.sub( r'.*/--', '', "%s" % opt ) - - # - # And what if there's no short version? - # - option_name= re.sub( r'^--', '', "%s" % option_name ) - - # - # Don't display anything we shouldn't be displaying - # - if option_name in options_to_ignore: - continue - - # - # Find the actual value specified on the command line, if any, - # and display it - # - - value= getattr( parser.values, opt.dest ) - - default_value= parser.defaults.get( opt.dest ) - if default_value is NO_DEFAULT or default_value is None: - default_value= "" - default_string= "" - if default_value != "": - default_string= ", default: " + str( default_value ) - - comment= self.pretty_print_config_comment( opt.help + default_string ) - - # - # NB: I would prefer to use tabs to separate the parameter name - # and value, but I don't know that this would work. 
- # - print """# -# %s -#%s %s""" % ( comment, option_name, value ) + if opt.help is SUPPRESS_HELP: + continue + + # + # Don't include items in the ignore list + # + option_name = re.sub(r".*/--", "", "%s" % opt) + option_name = re.sub(r"^--", "", "%s" % option_name) + if option_name in _OPTIONS_TO_IGNORE: + continue + + # + # Find the actual value specified on the command line, if any, + # and display it + # + default_value = self._get_default_value(parser, opt) + help_text = self._get_help_text(opt, default_value) + description = self.pretty_print_config_comment(help_text) + + value = getattr(parser.values, opt.dest) + if value is None: + value = default_value + + comment_char = "#" if str(value) == str(default_value) else "" + + # + # NB: I would prefer to use tabs to separate the parameter name + # and value, but I don't know that this would work. + # + print( + "\n".join( + ( + "#", + description, + "%s%s %s" % (comment_char, option_name, value), + ) + ) + ) # # Pretty print and exit # - print "#" - sys.exit( 0 ) - + print("#") + sys.exit(0) - - def generate_xml_table( self, parser, options ): + def generate_xml_table(self, parser, options): """ Create a Docbook table based on the long-form of the option names - @parameter parser: an optparse parser object which contains defaults, help - @parameter options: parsed options list containing actual values + :param parser: an optparse parser object which contains defaults, help + :param options: parsed options list containing actual values """ # # Header for the configuration file # unused(options) - daemon_name= os.path.basename( sys.argv[0] ) - daemon_name= daemon_name.replace( '.py', '' ) + daemon_name = os.path.basename(sys.argv[0]) + daemon_name = daemon_name.replace(".py", "") - print """ + print( + """
-%s Options +{name} Options - + @@ -606,10 +587,10 @@ def generate_xml_table( self, parser, options ): -""" % ( daemon_name, daemon_name, daemon_name, daemon_name ) - - - options_to_ignore= ( 'help', 'version', '', 'genconf', 'genxmltable' ) +""".format( # noqa E501 + name=daemon_name + ) + ) # # Create an entry for each of the command line flags @@ -618,73 +599,91 @@ def generate_xml_table( self, parser, options ): # entries, rather than the command line options. # import re + for opt in getAllParserOptionsGen(parser): - if opt.help is SUPPRESS_HELP: - continue - - # - # Create a Docbook-happy version of the option strings - # Yes, would be better semantically, but the output - # just looks goofy in a table. Use literal instead. - # - all_options= '' + re.sub( r'/', ',', "%s" % opt ) + '' - - # - # Don't display anything we shouldn't be displaying - # - option_name= re.sub( r'.*/--', '', "%s" % opt ) - option_name= re.sub( r'^--', '', "%s" % option_name ) - if option_name in options_to_ignore: - continue - - default_value= parser.defaults.get( opt.dest ) - if default_value is NO_DEFAULT or default_value is None: - default_value= "" - default_string= "" - if default_value != "": - default_string= " Default: " + str( default_value ) + "\n" - - comment= self.pretty_print_config_comment( opt.help ) + if opt.help is SUPPRESS_HELP: + continue -# -# TODO: Determine the variable name used and display the --option_name=variable_name -# - if opt.action in [ 'store_true', 'store_false' ]: - print """ + # + # Create a Docbook-happy version of the option strings + # Yes, would be better semantically, but the output + # just looks goofy in a table. Use literal instead. + # + all_options = ( + "" + + re.sub( + r"/", ",", "%s" % opt + ) + + "" + ) + + # + # Don't display anything we shouldn't be displaying + # + option_name = re.sub(r".*/--", "", "%s" % opt) + option_name = re.sub(r"^--", "", "%s" % option_name) + if option_name in _OPTIONS_TO_IGNORE: + continue + + default_value = self._get_default_value(parser, opt) + + if "%default" in opt.help: + comment = opt.help.replace("%default", default_value) + else: + comment = opt.help + + default_string = "" + if default_value != "": + default_string = ( + " Default: " + + str(default_value) + + "\n" + ) + + # comment = self.pretty_print_config_comment(opt.help) + + # + # TODO: Determine the variable name used and display the + # --option_name=variable_name + # + if opt.action in ["store_true", "store_false"]: + print( + """ %s %s %s - -""" % ( all_options, comment, default_string ) +""" + % (all_options, comment, default_string) + ) - else: - target= '=' + opt.dest.lower() + '' - all_options= all_options + target - all_options= re.sub( r',', target + ',', all_options ) - print """ + else: + target = "=" + opt.dest + "" + all_options = all_options + target + all_options = re.sub(r",", target + ",", all_options) + print( + """ %s %s %s - -""" % ( all_options, comment, default_string ) - - +""" + % (all_options, comment, default_string) + ) # # Close the table elements # - print """ + print( + """
-%s Daemons%s options
+{name} Daemons{name} options
""" - sys.exit( 0 ) - - + ) + sys.exit(0) - def generate_xml_configs( self, parser, options ): + def generate_xml_configs(self, parser, options): """ Create an XML file that can be used to create Docbook files as well as used as the basis for GUI-based daemon option @@ -695,21 +694,17 @@ def generate_xml_configs( self, parser, options ): # Header for the configuration file # unused(options) - daemon_name= os.path.basename( sys.argv[0] ) - daemon_name= daemon_name.replace( '.py', '' ) + daemon_name = os.path.basename(sys.argv[0]).replace(".py", "") export_date = datetime.datetime.now() - print """ + print( + """ - -""" % ( export_date, daemon_name ) - - options_to_ignore= ( - 'help', 'version', '', 'genconf', 'genxmltable', - 'genxmlconfigs', +""" + % (export_date, daemon_name) ) # @@ -718,42 +713,108 @@ def generate_xml_configs( self, parser, options ): # NB: Ideally, this should print out only the option parser dest # entries, rather than the command line options. # - import re for opt in getAllParserOptionsGen(parser): - if opt.help is SUPPRESS_HELP: - continue - - # - # Don't display anything we shouldn't be displaying - # - option_name= re.sub( r'.*/--', '', "%s" % opt ) - option_name= re.sub( r'^--', '', "%s" % option_name ) - if option_name in options_to_ignore: - continue - - default_value= parser.defaults.get( opt.dest ) - if default_value is NO_DEFAULT or default_value is None: - default_string= "" - else: - default_string= str( default_value ) - -# -# TODO: Determine the variable name used and display the --option_name=variable_name -# - if opt.action in [ 'store_true', 'store_false' ]: - print """ -""" - sys.exit( 0 ) + print("\n") + sys.exit(0) + + +def _build_parser(cls=OptionParser): + from Products.ZenModel.ZenossInfo import ZenossInfo + + try: + zinfo = ZenossInfo("") + version = str(zinfo.getZenossVersion()) + except Exception: + from Products.ZenModel.ZVersion import VERSION + + version = VERSION + return cls( + version="%prog " + version, + option_class=CmdBaseOption, + ) + + +def _get_defaults_from_config(args): + overrides = dict(getGlobalConfiguration()) + + cparser = _build_parser(cls=_KnownOptionsParser) + cparser.add_option( + "-C", + "--configfile", + dest="configfile", + ) + + opts, _ = cparser.parse_args(args=args) + if opts.configfile: + try: + appcfg = ConfigLoader(opts.configfile)() + overrides.update(appcfg) + except Exception as ex: # noqa: F841 S110 + # Restore this code when the wrapper scripts no longer + # add the -C option all the time. + # print("warning: {}".format(ex), file=sys.stderr) + pass + return {key.replace("-", "_"): value for key, value in overrides.items()} + + +class _KnownOptionsParser(OptionParser): + """ + Extend OptionParser to skip unknown options and disable --help. 
+ """ + + def __init__(self, *args, **kwargs): + OptionParser.__init__(self, *args, add_help_option=False, **kwargs) + + def _process_long_opt(self, rargs, values): + try: + OptionParser._process_long_opt(self, rargs, values) + except BadOptionError: + pass + + def _process_short_opts(self, rargs, values): + try: + OptionParser._process_short_opts(self, rargs, values) + except BadOptionError: + pass diff --git a/Products/ZenUtils/DaemonStats.py b/Products/ZenUtils/DaemonStats.py index 51f98cf129..3f5193bb06 100644 --- a/Products/ZenUtils/DaemonStats.py +++ b/Products/ZenUtils/DaemonStats.py @@ -7,13 +7,14 @@ # ############################################################################## +import time -import time, os +from .controlplane import configuration as cc_config class DaemonStats(object): """ - Utility for a daemon to write out internal performance statistics + Utility for a daemon to write out internal performance statistics. """ def __init__(self): @@ -22,12 +23,27 @@ def __init__(self): self.metric_writer = None self._threshold_notifier = None self._derivative_tracker = None - self._service_id = None - self._tenant_id = None - self._instance_id = None - - def config(self, name, monitor, metric_writer, threshold_notifier, - derivative_tracker): + self._ctx_id = None + self._ctx_key = None + + tags = {"internal": True} + # Only capture the control center variables that have a value. + if cc_config.service_id: + tags["serviceId"] = cc_config.service_id + if cc_config.tenant_id: + tags["tenantId"] = cc_config.tenant_id + if cc_config.instance_id: + tags["instance"] = cc_config.instance_id + self._common_tags = tags + + def config( + self, + name, + monitor, + metric_writer, + threshold_notifier, + derivative_tracker, + ): """ Initialize the object. 
We could do this in __init__, but that would delay creation to after configuration time, which @@ -42,66 +58,54 @@ def config(self, name, monitor, metric_writer, threshold_notifier, self._threshold_notifier = threshold_notifier self._derivative_tracker = derivative_tracker - # when running inside control plane pull the service id from the environment - if os.environ.get( 'CONTROLPLANE', "0") == "1": - self._tenant_id = os.environ.get('CONTROLPLANE_TENANT_ID') - self._service_id = os.environ.get('CONTROLPLANE_SERVICE_ID') - self._instance_id = os.environ.get('CONTROLPLANE_INSTANCE_ID') - - def _context_id(self): - return self.name + "-" + self.monitor + # Update the common tags + self._common_tags.update({"daemon": name, "monitor": monitor}) - def _contextKey(self): - return "/".join(('Daemons', self.monitor)) + # evaluate identifiers once + self._ctx_id = name + "-" + monitor + self._ctx_key = "/".join(("Daemons", monitor)) def _tags(self, metric_type): - tags = { - 'daemon': self.name, - 'monitor': self.monitor, - 'metricType': metric_type, - 'internal': True - } - if self._service_id: - tags['serviceId'] = self._service_id - - if self._tenant_id: - tags['tenantId'] = self._tenant_id - - if self._instance_id: - tags['instance'] = self._instance_id - + tags = self._common_tags.copy() + tags["metricType"] = metric_type return tags def derive(self, name, value): """Write a DERIVE value and post any relevant events""" - self.post_metrics(name, value, 'DERIVE') + self.post_metrics(name, value, "DERIVE") def counter(self, name, value): """Write a COUNTER value and post any relevant events""" - self.post_metrics(name, value, 'COUNTER') + self.post_metrics(name, value, "COUNTER") def gauge(self, name, value): """Write a GAUGE value and post any relevant events""" - self.post_metrics(name, value, 'GAUGE') + self.post_metrics(name, value, "GAUGE") def post_metrics(self, name, value, metric_type): tags = self._tags(metric_type) timestamp = time.time() - context_id = self._context_id() - if metric_type in {'DERIVE', 'COUNTER'}: + if metric_type in {"DERIVE", "COUNTER"}: # compute (and cache) a rate for COUNTER/DERIVE - if metric_type == 'COUNTER': + if metric_type == "COUNTER": metric_min = 0 else: - metric_min = 'U' + metric_min = "U" value = self._derivative_tracker.derivative( - '%s:%s' % (context_id, name), (float(value), timestamp), - min=metric_min) + "%s:%s" % (self._ctx_id, name), + (float(value), timestamp), + min=metric_min, + ) if value is not None: self._metric_writer.write_metric(name, value, timestamp, tags) # check for threshold breaches and send events when needed self._threshold_notifier.notify( - self._contextKey(), context_id, self.name+'_'+name, timestamp, value) + self._ctx_key, + self._ctx_id, + self.name + "_" + name, + timestamp, + value, + ) diff --git a/Products/ZenUtils/Executor.py b/Products/ZenUtils/Executor.py index fdcd1abdff..5087fc4bbe 100644 --- a/Products/ZenUtils/Executor.py +++ b/Products/ZenUtils/Executor.py @@ -19,21 +19,25 @@ def makeExecutor(queue=None, limit=0, log=log, startnow=True): - """Return a new task executor. + """ + Return a new task executor. A limit of zero implies no limit. 
- @param name: Name of the executor - @type name: str - - @param queue: A queue-like object for storing tasks - @type queue: defer.DeferredQueue() or similiar - - @param limit: The maximum number of concurrent tasks - @type limit: int - - @param log: The log object - @type log: logging.Logger + If startnow is False, then the `start` method must be called on the + returned executor to start the executor running. + + :param name: Name of the executor + :type name: str + :param queue: A queue-like object for storing tasks + :type queue: defer.DeferredQueue() or equivalent + :param limit: The maximum number of concurrent tasks + :type limit: int + :param log: The log object + :type log: logging.Logger + :param startnow: If True, start the executor immediately (default True). + :type startnow: boolean + :rtype: AsyncExecutor """ if queue is None: queue = defer.DeferredQueue() @@ -136,10 +140,15 @@ def queued(self): def submit(self, call, timeout=None, label=""): """Submit a callable to run asynchronously. - @param call: A callable to be executed - @type call: callable + :param call: A callable to be executed + :type call: callable + :param timeout: how long a task can run before timeout? + :type timeout: float + :param label: A optional value to apply to a task. This value can + be used to associate related tasks. + :type label: str - @return: A Deferred that returns the return value of the callable + :return: A Deferred that returns the return value of the callable if it does not raise an exception. If the callable raises an exception, the Deferred returns the exception. """ @@ -207,15 +216,12 @@ def execute(self, task): ) task.error(ex) except Exception as ex: - message = ( - "Bad task executor=%s task-id=%s label=%s", - self._id, task.id, task.label - ) + message = "Bad task executor=%s task-id=%s label=%s" + params = (self._id, task.id, task.label) if self._log.isEnabledFor(logging.DEBUG): - self._log.exception(message) + self._log.exception(message, *params) else: - self._log.error("%s: %s", message, ex) - + self._log.error(message + " error=%s", *(params + (ex,))) task.error(ex) finally: self._tasks_running -= 1 diff --git a/Products/ZenUtils/GlobalConfig.py b/Products/ZenUtils/GlobalConfig.py index 3244e5c6c1..6049825086 100644 --- a/Products/ZenUtils/GlobalConfig.py +++ b/Products/ZenUtils/GlobalConfig.py @@ -1,43 +1,46 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
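Before moving on to GlobalConfig, a quick usage sketch for the executor API documented above, assuming makeExecutor and submit behave as their docstrings describe; the submitted callables, labels, and limit value are made up:

from twisted.internet import defer, reactor


@defer.inlineCallbacks
def demo():
    from Products.ZenUtils.Executor import makeExecutor

    executor = makeExecutor(limit=2)  # at most two tasks in flight at once

    # submit() returns a Deferred that fires with the callable's result
    results = yield defer.gatherResults([
        executor.submit(lambda: 1 + 2, label="demo"),
        executor.submit(lambda: 3 + 4, label="demo"),
    ])
    print(results)  # [3, 7]
    reactor.stop()


reactor.callWhenRunning(demo)
reactor.run()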
-# +# ############################################################################## +from __future__ import absolute_import, print_function import logging -log = logging.getLogger('zen.GlobalConfig') -import sys -from optparse import OptionValueError, BadOptionError -import re import os.path +import re +import sys -from Products.ZenUtils.Utils import zenPath, getAllParserOptionsGen -from Products.ZenUtils.config import Config, ConfigLoader - +from optparse import OptionValueError, BadOptionError -CONFIG_FILE = zenPath('etc', 'global.conf') +from .config import Config, ConfigLoader +from .Utils import zenPath, getAllParserOptionsGen +log = logging.getLogger("zen.GlobalConfig") +CONFIG_FILE = zenPath("etc", "global.conf") +_KEYVALUE = re.compile( + r"^[\s ]*(?P[a-z_]+[a-z0-9_-]*)[\s]+(?P[^\s#]+)", re.IGNORECASE +).search -_KEYVALUE = re.compile("^[\s ]*(?P[a-z_]+[a-z0-9_-]*)[\s]+(?P[^\s#]+)", re.IGNORECASE).search def globalConfToDict(): settings = {} - globalConfFile = zenPath('etc','global.conf') + globalConfFile = zenPath("etc", "global.conf") if os.path.exists(globalConfFile): - with open(globalConfFile, 'r') as f: + with open(globalConfFile, "r") as f: for line in f.xreadlines(): match = _KEYVALUE(line) if match: - value = match.group('value') + value = match.group("value") if value.isdigit(): value = int(value) - settings[match.group('key')] = value + settings[match.group("key")] = value return settings + class GlobalConfig(Config): """ A method for retrieving the global configuration options @@ -46,19 +49,23 @@ class GlobalConfig(Config): @todo Add validation for expected keys and values """ - pass + _GLOBAL_CONFIG = ConfigLoader(CONFIG_FILE, GlobalConfig) + + def getGlobalConfiguration(): return _GLOBAL_CONFIG() def flagToConfig(flag): return flag.trim().lstrip("-").replace("-", "_") - + + def configToFlag(option): return "--" + option.strip().replace("_", "-") + def _convertConfigLinesToArguments(parser, lines): """ Converts configuration file lines of the format: @@ -73,39 +80,50 @@ def _convertConfigLinesToArguments(parser, lines): @parameter parser: OptionParser object containing configuration options. @type parser: OptionParser - @parameter lines: List of dictionary object parsed from a configuration file. - Each option is expected to have 'type', 'key', 'value' entries. + @parameter lines: List of dictionary object parsed from a configuration + file. Each option is expected to have 'type', 'key', 'value' entries. @type lines: list of dictionaries. - @return: List of command-line arguments corresponding to the configuration file. + @return: List of command-line arguments corresponding to the + configuration file. 
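For reference, the _KEYVALUE pattern above can be exercised on its own; it relies on the named groups key and value, written (?P<key>...) and (?P<value>...), which the match.group() calls in globalConfToDict depend on. A small self-contained sketch with invented sample settings:

import re

_KEYVALUE = re.compile(
    r"^[\s ]*(?P<key>[a-z_]+[a-z0-9_-]*)[\s]+(?P<value>[^\s#]+)",
    re.IGNORECASE,
).search

sample = [
    "# comment lines and blanks are skipped",
    "zodb-host  127.0.0.1",
    "zodb-port  3306   # trailing text is not captured",
]

settings = {}
for line in sample:
    match = _KEYVALUE(line)
    if match:
        value = match.group("value")
        if value.isdigit():
            value = int(value)
        settings[match.group("key")] = value

print(settings)  # {'zodb-host': '127.0.0.1', 'zodb-port': 3306} (order may vary)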
@rtype: list of strings """ # valid key # an option's string without the leading "--" # can differ from an option's destination - validOpts = set((opt.get_opt_string() for opt in getAllParserOptionsGen(parser))) - + validOpts = { + opt.get_opt_string() for opt in getAllParserOptionsGen(parser) + } + args = [] for line in lines: - if line.get('type', None) != 'option': + if line.get("type", None) != "option": continue - optstring = configToFlag(line['key']) + optstring = configToFlag(line["key"]) if optstring in validOpts: option = parser.get_option(optstring) - value = line.get('value', '') - boolean_value = value.lower() in ('true','yes','1') if value else False - if option.action == 'store_true': + value = line.get("value", "") + boolean_value = ( + value.lower() in ("true", "yes", "1") if value else False + ) + if option.action == "store_true": if boolean_value: args.append(optstring) - elif option.action == 'store_false': + elif option.action == "store_false": if not boolean_value: args.append(optstring) else: - args.extend([optstring, line['value'],]) + args.extend( + [ + optstring, + line["value"], + ] + ) else: log.debug("Unknown option: %s", optstring) return args + class _GlobalConfParserAdapter(object): def __init__(self, parser): self.parser = parser @@ -126,8 +144,9 @@ def _getGlobalConfigFileDefaults(self): args = _convertConfigLinesToArguments(self.parser, lines) try: self.parser._process_args([], args, options) - except (BadOptionError, OptionValueError) as err: - # Ignore it, we only care about our own options as defined in the parser + except (BadOptionError, OptionValueError): + # Ignore it, we only care about our own options as defined + # in the parser. pass return options.__dict__ @@ -143,24 +162,42 @@ def _loadConfigFile(self, filename): try: with open(filename) as file: for line in file: - if line.lstrip().startswith('#') or line.strip() == '': - lines.append(dict(type='comment', line=line)) + if line.lstrip().startswith("#") or line.strip() == "": + lines.append({"type": "comment", "line": line}) else: try: key, value = line.strip().split(None, 1) except ValueError: - lines.append(dict(type='option', line=line, key=line.strip(), value=None, option=None)) + lines.append( + { + "type": "option", + "line": line, + "key": line.strip(), + "value": None, + "option": None, + } + ) else: - option = self.parser.get_option('--%s' % key) - lines.append(dict(type='option', line=line, key=key, value=value, option=option)) + option = self.parser.get_option("--%s" % key) + lines.append( + { + "type": "option", + "line": line, + "key": key, + "value": value, + "option": option, + } + ) except IOError as e: - errorMessage = 'WARN: unable to read config file {filename} \ - -- skipping. ({exceptionName}: {exception})'.format( - filename=filename, - exceptionName=e.__class__.__name__, - exception=e + errorMessage = ( + "WARN: unable to read config file {filename} -- skipping. " + "({exceptionName}: {exception})".format( + filename=filename, + exceptionName=e.__class__.__name__, + exception=e, + ) ) - print >>sys.stderr, errorMessage + print(errorMessage, file=sys.stderr) return [] return lines diff --git a/Products/ZenUtils/IpUtil.py b/Products/ZenUtils/IpUtil.py index 750141e30f..8dffcbc59e 100644 --- a/Products/ZenUtils/IpUtil.py +++ b/Products/ZenUtils/IpUtil.py @@ -195,20 +195,18 @@ def ipToDecimal(ip): calculating netmasks etc. >>> ipToDecimal('10.10.20.5') - 168432645L + 168432645 >>> try: ipToDecimal('10.10.20.500') ... 
except IpAddressError as ex: print ex 10.10.20.500 is an invalid address """ checkip(ip) - # The unit tests expect to always get a long, while the - # ipaddr.IPaddress class doesn't provide a direct "to long" capability unwrapped = ipunwrap(ip) if '%' in unwrapped: address = unwrapped[:unwrapped.index('%')] else: address = unwrapped - return long(int(IPAddress(address))) + return int(IPAddress(address)) def ipFromIpMask(ipmask): """ @@ -309,7 +307,7 @@ def maskToBits(netmask): 0 """ if isinstance(netmask, basestring) and '.' in netmask: - test = 0xffffffffL + test = 0xffffffff if netmask[0]=='0': return 0 masknumb = ipToDecimal(netmask) for i in range(32): @@ -333,16 +331,16 @@ def bitsToDecimalMask(netbits): Convert integer number of netbits to a decimal number >>> bitsToDecimalMask(32) - 4294967295L + 4294967295 >>> bitsToDecimalMask(19) - 4294959104L + 4294959104 >>> bitsToDecimalMask(0) - 0L + 0 """ - masknumb = 0L + masknumb = 0 netbits=int(netbits) for i in range(32-netbits, 32): - masknumb += 2L ** i + masknumb += 2 ** i return masknumb @@ -371,12 +369,12 @@ def decimalNetFromIpAndNet(ip, netmask): Get network address of IP as string netmask as in the form 255.255.255.0 >>> getnet('10.12.25.33', 24) - 168564992L + 168564992 >>> getnet('10.12.25.33', '255.255.255.0') - 168564992L + 168564992 """ checkip(ip) - return long(int(IPNetwork( ipunwrap(ip) + '/' + str(netmask)).network)) + return int(IPNetwork(ipunwrap(ip) + '/' + str(netmask)).network) def getnetstr(ip, netmask): """ diff --git a/Products/ZenUtils/MetricReporter.py b/Products/ZenUtils/MetricReporter.py index b01c490f06..7bd485f31a 100644 --- a/Products/ZenUtils/MetricReporter.py +++ b/Products/ZenUtils/MetricReporter.py @@ -1,24 +1,23 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2017, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## import json +import inspect import logging -import os -import requests import time -from twisted.internet import reactor, defer, task -from metrology.registry import registry -from astrolabe.interval import Interval -from itertools import izip from collections import deque +from itertools import izip +import requests + +from astrolabe.interval import Interval from metrology.instruments import ( Counter, Gauge, @@ -27,99 +26,79 @@ Timer, UtilizationTimer, ) - +from metrology.registry import registry from metrology.reporter.base import Reporter +from twisted.internet import reactor, defer, task -log = logging.getLogger("zen.metricreporter") - - -class TimeOnce(object): - """ - a simple context manager to time something and save tag values and - a measurement. - """ - def __init__(self, gauge, *args): - self.gauge = gauge - self.tagValues = args - def __enter__(self): - self.interval = Interval.now() - def __exit__(self, *args): - self.gauge.update(self.tagValues, self.interval.stop()) +from .controlplane import configuration as cc_config +DEFAULT_METRIC_URL = "http://localhost:22350/api/metrics/store" -class QueueGauge(object): - """ - This instrument contains simple point-in-time measurements like a gauge. 
- Unlike a gauge, however, it: - - can be configured to have tags whose values can vary with each measurement - - contains a queue of values with tag values, which are read only once each - Many values or none can be written to this instrument between cycles of its - reporter, so for it many or no values will be published. - Calling an instance returns something which should append to the instances - queue a tuple, which should contain 1 value for each tagKey of the instance, - followed by a measurement. - """ - def __init__(self, *args): - self.newContextManager = args[0] if callable(args[0]) else TimeOnce - self.tagKeys = args if not callable(args[0]) else args[1:] - self.queue = deque() - def __call__(self, *args): - if len(self.tagKeys) != len(args): - raise RuntimeError('The number of tag values provided does not match the number of configured tag keys') - return self.newContextManager(self, *args) - def update(self, tagValues, metricValue): - self.queue.appendleft(tagValues + (metricValue,)) +log = logging.getLogger("zen.metricreporter") class MetricReporter(Reporter): def __init__(self, **options): - super(MetricReporter, self).__init__(interval=30) - self.prefix = options.get('prefix', "") - self.metric_destination = os.environ.get("CONTROLPLANE_CONSUMER_URL", "") - if self.metric_destination == "": - self.metric_destination = "http://localhost:22350/api/metrics/store" + interval = options.get("interval", 30) + super(MetricReporter, self).__init__(interval=interval) + self.prefix = options.get("prefix", "") + self.metric_destination = cc_config.consumer_url + if not self.metric_destination: + self.metric_destination = DEFAULT_METRIC_URL self.session = None - self.tags = None - self.tags = { - 'serviceId': os.environ.get('CONTROLPLANE_SERVICE_ID', ''), - 'instance': os.environ.get('CONTROLPLANE_INSTANCE_ID', ''), - 'hostId': os.environ.get('CONTROLPLANE_HOST_ID', ''), - 'tenantId': os.environ.get('CONTROLPLANE_TENANT_ID', ''), - } + self.tags = dict(options.get("tags", {})) + self.tags.update( + { + "serviceId": cc_config.service_id, + "instance": cc_config.instance_id, + "hostId": cc_config.host_id, + "tenantId": cc_config.tenant_id, + } + ) + + def add_tags(self, tags): + self.tags.update(tags) + # @override def write(self): self._write() def _write(self): - metrics = getMetrics(self.registry, self.tags, self.prefix) try: + metrics = getMetrics(self.registry, self.tags, self.prefix) self.postMetrics(metrics) except Exception as e: - log.error(e) + log.exception(e) def postMetrics(self, metrics): if not self.session: self.session = requests.Session() - self.session.headers.update({'Content-Type': 'application/json'}) - self.session.headers.update({'User-Agent': 'Zenoss Service Metrics'}) - post_data = {'metrics': metrics} - log.debug("Sending metric payload: %s", post_data) - response = self.session.post(self.metric_destination, - data=json.dumps(post_data)) + self.session.headers.update({"Content-Type": "application/json"}) + self.session.headers.update({"User-Agent": "Zenoss Service Metrics"}) + post_data = {"metrics": metrics} + log.debug("sending metric payload: %s", post_data) + response = self.session.post( + self.metric_destination, data=json.dumps(post_data) + ) if response.status_code != 200: - log.warning("Problem submitting metrics: %d, %s", - response.status_code, response.text.replace('\n', '\\n')) + log.warning( + "problem submitting metrics: %d, %s", + response.status_code, + response.text.replace("\n", "\\n"), + ) self.session = None else: - log.debug("%d Metrics 
posted", len(metrics)) + log.debug("%d metrics posted", len(metrics)) class TwistedMetricReporter(object): - def __init__(self, interval=30, metricWriter=None, tags={}, *args, **options): + def __init__( + self, interval=30, metricWriter=None, tags={}, *args, **options + ): super(TwistedMetricReporter, self).__init__() - self.registry = options.get('registry', registry) - self.prefix = options.get('prefix', "") + self.registry = options.get("registry", registry) + self.prefix = options.get("prefix", "") self.metricWriter = metricWriter self.interval = interval self.tags = {} @@ -132,7 +111,7 @@ def doStart(): self._loop.start(self.interval, now=False) reactor.callWhenRunning(doStart) - reactor.addSystemEventTrigger('before', 'shutdown', self.stop) + reactor.addSystemEventTrigger("before", "shutdown", self.stop) @defer.inlineCallbacks def stop(self): @@ -145,47 +124,105 @@ def stop(self): def postMetrics(self): try: for metric in getMetrics(self.registry, self.tags, self.prefix): - yield self.metricWriter.write_metric(metric['metric'], metric['value'], metric['timestamp'], - metric['tags']) + yield self.metricWriter.write_metric( + metric["metric"], + metric["value"], + metric["timestamp"], + metric["tags"], + ) except Exception: log.exception("Error writing metrics") +class TimeOnce(object): + """ + A context manager to time something and save tag values and + a measurement. + """ + + def __init__(self, gauge, *args): + self.gauge = gauge + self.tagValues = args + + def __enter__(self): + self.interval = Interval.now() + + def __exit__(self, *args): + self.gauge.update(self.tagValues, self.interval.stop()) + + +class QueueGauge(object): + """ + This instrument contains simple point-in-time measurements like a gauge. + + Unlike a gauge, however, it: + - can be configured to have tags whose values can vary with each + measurement. + - contains a queue of values with tag values, which are read only + once each. + + Many values or none can be written to this instrument between cycles of its + reporter, so for it many or no values will be published. + + Calling an instance returns something which should append to the instances + queue a tuple, which should contain 1 value for each tagKey of the + instance, followed by a measurement. 
+ """ + + def __init__(self, *args): + self.newContextManager = args[0] if callable(args[0]) else TimeOnce + self.tagKeys = args if not callable(args[0]) else args[1:] + self.queue = deque() + + def __call__(self, *args): + if len(self.tagKeys) != len(args): + raise RuntimeError( + "The number of tag values provided does not match the " + "number of configured tag keys" + ) + return self.newContextManager(self, *args) + + def update(self, tagValues, metricValue): + self.queue.appendleft(tagValues + (metricValue,)) + + def getMetrics(mRegistry, tags, prefix): metrics = [] - snapshot_keys = ['median', 'percentile_95th'] for name, metric in mRegistry: log.debug("metric info: %s, %s", name, metric) - if isinstance(metric, Meter): - keys = ['count', 'one_minute_rate', 'five_minute_rate', - 'fifteen_minute_rate', 'mean_rate'] - metrics.extend(log_metric(name, metric, keys, tags, prefix)) - if isinstance(metric, Gauge): - keys = ['value'] - metrics.extend(log_metric(name, metric, keys, tags, prefix)) - if isinstance(metric, QueueGauge): - metrics.extend(log_queue_gauge(name, metric, tags, prefix)) - if isinstance(metric, UtilizationTimer): - keys = ['count', 'one_minute_rate', 'five_minute_rate', - 'fifteen_minute_rate', 'mean_rate', 'min', 'max', - 'mean', 'stddev', 'one_minute_utilization', - 'five_minute_utilization', 'fifteen_minute_utilization', - 'mean_utilization'] - metrics.extend(log_metric(name, metric, keys, tags, prefix, snapshot_keys)) - if isinstance(metric, Timer): - keys = ['count', 'one_minute_rate', 'five_minute_rate', - 'fifteen_minute_rate', 'mean_rate', 'min', 'max', 'mean', - 'stddev'] - metrics.extend(log_metric(name, metric, keys, tags, prefix, snapshot_keys)) - if isinstance(metric, Counter): - keys = ['count'] - metrics.extend(log_metric(name, metric, keys, tags, prefix)) - if isinstance(metric, Histogram): - keys = ['count', 'min', 'max', 'mean', 'stddev'] - metrics.extend(log_metric(name, metric, keys, tags, prefix, snapshot_keys)) + metrics.extend(getMetricData(metric, name, tags, prefix)) return metrics -def log_queue_gauge(name, metric, tags, prefix): + +def getMetric(mRegistry, name, tags, prefix): + if name not in mRegistry: + log.info("%s not found in metric registry", name) + return [] + metric = mRegistry.get(name) + return getMetricData(metric, name, tags, prefix) + + +_snapshot_keys = ["median", "percentile_95th"] + + +def getMetricData(metric, name, tags, prefix): + config = _getMetricConfig(metric) + if config is None: + log.info("could not generate a config for metric %s", name) + return [] + fn = config["fn"] + keys = config["keys"] + return fn(name, metric, keys, tags, prefix, _snapshot_keys) + +def _getMetricConfig(metric): + keys = (_classname(cls) for cls in inspect.getmro(metric.__class__)) + return next( + (_metric_configs.get(key) for key in keys if key in _metric_configs), + None, + ) + + +def _log_queue_gauge(name, metric, tags, prefix): """ A QueueGauge needs this unique handler because it does not contain a fixed number of values. 
@@ -201,16 +238,26 @@ def log_queue_gauge(name, metric, tags, prefix): stat = metric.queue.pop() qtags = tags.copy() qtags.update(izip(metric.tagKeys, stat)) - results.append({"metric": whole_metric_name, - "value": stat[-1], - "timestamp": ts, - "tags": qtags}) + results.append( + { + "metric": whole_metric_name, + "value": stat[-1], + "timestamp": ts, + "tags": qtags, + } + ) + log.debug( + "recording metric metric=%s value=%s", + whole_metric_name, + stat[-1], + ) except Exception as e: log.error(e) return results -def log_metric(name, metric, keys, tags, prefix, snapshot_keys=None): + +def _log_metric(name, metric, keys, tags, prefix, snapshot_keys=None): results = [] if snapshot_keys is None: @@ -221,19 +268,106 @@ def log_metric(name, metric, keys, tags, prefix, snapshot_keys=None): try: for stat in keys: whole_metric_name = "%s.%s" % (metric_name, stat) - results.append({"metric": whole_metric_name, - "value": getattr(metric, stat), - "timestamp": ts, - "tags": tags}) + results.append( + { + "metric": whole_metric_name, + "value": getattr(metric, stat), + "timestamp": ts, + "tags": tags, + } + ) + log.debug( + "recording metric metric=%s value=%s", + whole_metric_name, + getattr(metric, stat), + ) - if hasattr(metric, 'snapshot'): + if hasattr(metric, "snapshot"): snapshot = metric.snapshot for stat in snapshot_keys: whole_metric_name = "%s.%s" % (metric_name, stat) - results.append({"metric": whole_metric_name, - "value": getattr(snapshot, stat), - "timestamp": ts, - "tags": tags}) + results.append( + { + "metric": whole_metric_name, + "value": getattr(snapshot, stat), + "timestamp": ts, + "tags": tags, + } + ) + log.debug( + "recording metric metric=%s value=%s", + whole_metric_name, + getattr(snapshot, stat), + ) except Exception as e: log.error(e) return results + + +def _log_without_snapshot(name, metric, keys, tags, prefix, snapshot_keys): + return _log_metric(name, metric, keys, tags, prefix) + + +def _classname(obj): + return obj.__name__ if isinstance(obj, type) else type(obj).__name__ + + +_metric_configs = { + _classname(Meter): { + "fn": _log_without_snapshot, + "keys": [ + "count", + "one_minute_rate", + "five_minute_rate", + "fifteen_minute_rate", + "mean_rate", + ], + }, + _classname(Gauge): {"fn": _log_without_snapshot, "keys": ["value"]}, + _classname(QueueGauge): { + "fn": lambda name, metric, _, tags, prefix: _log_queue_gauge( + name, metric, tags, prefix + ), + "keys": [], + }, + _classname(UtilizationTimer): { + "fn": _log_metric, + "keys": [ + "count", + "one_minute_rate", + "five_minute_rate", + "fifteen_minute_rate", + "mean_rate", + "min", + "max", + "mean", + "stddev", + "one_minute_utilization", + "five_minute_utilization", + "fifteen_minute_utilization", + "mean_utilization", + ], + }, + _classname(Timer): { + "fn": _log_metric, + "keys": [ + "count", + "one_minute_rate", + "five_minute_rate", + "fifteen_minute_rate", + "mean_rate", + "min", + "max", + "mean", + "stddev", + ], + }, + _classname(Counter): { + "fn": _log_without_snapshot, + "keys": ["count"], + }, + _classname(Histogram): { + "fn": _log_metric, + "keys": ["count", "min", "max", "mean", "stddev"], + }, +} diff --git a/Products/ZenUtils/MetricServiceRequest.py b/Products/ZenUtils/MetricServiceRequest.py index 5fda9f0dfe..49f9c50fa8 100644 --- a/Products/ZenUtils/MetricServiceRequest.py +++ b/Products/ZenUtils/MetricServiceRequest.py @@ -67,7 +67,6 @@ def getMetrics(self, uuid, dpNames, cf='AVERAGE', rate=False, downsample="1h-avg metrics.append(dict( metric=name, 
aggregator=self._aggMapping.get(cf.lower(), cf.lower()), - rpn='', rate=rate, format='%.2lf', tags=dict(contextUUID=[uuid]), @@ -98,9 +97,8 @@ def fetchMetrics(self, metrics, start="1h-ago", end=None, returnSet="EXACT"): """ metricQueries = [] for metric in metrics: - log.info("fetchMetrics metrics %s", metric) + log.debug("fetchMetrics metrics %s", metric) cf = metric.get('cf', 'average') - rpn = metric.get('rpn', '') rate = metric.get('rate', False) tags = metric['tags'] downsample = metric.get('downsample', '5m-avg') @@ -109,21 +107,18 @@ def fetchMetrics(self, metrics, start="1h-ago", end=None, returnSet="EXACT"): metric=metricName, downsample=downsample, aggregator=self._aggMapping.get(cf.lower(), cf.lower()), - rpn=rpn, rate=rate, format='%.2lf', - tags=tags, - name=metricName + tags=tags )) request = dict( returnset=returnSet, start=start, end=end, - downsample=downsample, queries=metricQueries ) body = FileBodyProducer(StringIO(json.dumps(request))) - log.info("POST %s %s %s", self._metric_url_v2, self._headers, json.dumps(request)) + log.debug("POST %s %s %s", self._metric_url_v2, self._headers, json.dumps(request)) d = self.agent.request('POST', self._metric_url_v2, self._headers, body) return d diff --git a/Products/ZenUtils/MySqlZodbFactory.py b/Products/ZenUtils/MySqlZodbFactory.py index 84fe8368ea..451a1c73a7 100644 --- a/Products/ZenUtils/MySqlZodbFactory.py +++ b/Products/ZenUtils/MySqlZodbFactory.py @@ -9,8 +9,10 @@ import logging import optparse +import time import uuid +import MySQLdb import relstorage.adapters.mysql import relstorage.options import relstorage.storage @@ -24,6 +26,7 @@ _DEFAULT_MYSQLPORT = 3306 _DEFAULT_COMMIT_LOCK_TIMEOUT = 30 +_OPERATIONAL_ERROR_RETRY_DELAY = 0.5 log = logging.getLogger("zen.MySqlZodbFactory") @@ -133,7 +136,10 @@ def getConnection(self, **kwargs): if cache_servers: relstoreParams["cache_servers"] = cache_servers - storage = relstorage.storage.RelStorage(adapter, **relstoreParams) + storage = _get_storage(adapter, relstoreParams) + if storage is None: + raise SystemExit("Unable to retrieve ZODB storage") + cache_size = kwargs.get("zodb_cachesize", 1000) db = ZODB.DB(storage, cache_size=cache_size) import Globals @@ -151,7 +157,7 @@ def buildOptions(self, parser): group.add_option( "-R", "--zodb-dataroot", - dest="dataroot", + dest="zodb_dataroot", default="/zport/dmd", help="root object for data load (i.e. /zport/dmd)", ) @@ -160,7 +166,7 @@ def buildOptions(self, parser): dest="zodb_cachesize", default=1000, type="int", - help="in memory cachesize default: 1000", + help="in memory cachesize default: %default", ) group.add_option( "--zodb-host", @@ -222,7 +228,29 @@ def buildOptions(self, parser): help=( "Specify the number of seconds a database connection will " "wait to acquire a database 'commit' lock before failing " - "(defaults to 30 seconds if not specified)." + "(defaults to %default seconds if not specified)." ), ) parser.add_option_group(group) + + +def _get_storage(adapter, params): + attempt = 0 + while attempt < 3: + try: + return relstorage.storage.RelStorage(adapter, **params) + except MySQLdb.OperationalError as ex: + error = str(ex) + # Sleep for a very short duration. Celery signal handlers + # are given short durations to complete. 
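+            # A failed attempt falls through to the next loop iteration;
+            # after the third OperationalError the while/else "else"
+            # branch logs the last error and this helper returns None,
+            # which getConnection() turns into a SystemExit.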
+ time.sleep(_OPERATIONAL_ERROR_RETRY_DELAY) + attempt += 1 + except Exception as ex: + log.exception("unexpected failure") + # To avoid retrying on unexpected errors, set `attempt` to 3 to + # cause the loop to exit on the next iteration to allow the + # "else:" clause to run and cause this worker to exit. + error = str(ex) + attempt = 3 + else: + log.error("failed to initialize ZODB connection: %s", error) diff --git a/Products/ZenUtils/PBUtil.py b/Products/ZenUtils/PBUtil.py index c7a4cff93c..3ff95dfd43 100644 --- a/Products/ZenUtils/PBUtil.py +++ b/Products/ZenUtils/PBUtil.py @@ -1,34 +1,42 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2017, all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## -import logging -log = logging.getLogger("zen.pbclientfactory") +import logging -from twisted.spread.pb import PBClientFactory from twisted.internet import protocol, reactor, defer, task from twisted.internet.error import ConnectionClosed -import socket +from twisted.spread.pb import PBClientFactory + +log = logging.getLogger("zen.pbclientfactory") OPTION_STATE = 1 CONNECT_TIMEOUT = 60 -class ReconnectingPBClientFactory(PBClientFactory, - protocol.ReconnectingClientFactory): +class ReconnectingPBClientFactory( + PBClientFactory, protocol.ReconnectingClientFactory +): maxDelay = 60 - def __init__(self, connectTimeout=30, pingPerspective=True, pingInterval=30, pingtimeout=120): + def __init__( + self, + connectTimeout=30, + pingPerspective=True, + pingInterval=30, + pingtimeout=120, + ): PBClientFactory.__init__(self) self._creds = None self._scheduledConnectTimeout = None self._connectTimeout = connectTimeout - # should the perspective be pinged. Perspective must have a ping method. Deprecated => Always False. + # should the perspective be pinged. Perspective must have a ping + # method. Deprecated => Always False. 
self._shouldPingPerspective = pingPerspective # how often to ping self._pingInterval = pingInterval @@ -61,14 +69,20 @@ def clientConnectionFailed(self, connector, reason): self._perspective = None self._cancelConnectTimeout() PBClientFactory.clientConnectionFailed(self, connector, reason) - protocol.ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) + protocol.ReconnectingClientFactory.clientConnectionFailed( + self, connector, reason + ) def clientConnectionLost(self, connector, reason): log.debug("clientConnectionLost %s", reason) self._perspective = None self._cancelConnectTimeout() - PBClientFactory.clientConnectionLost(self, connector, reason, reconnecting=1) - protocol.ReconnectingClientFactory.clientConnectionLost(self, connector, reason) + PBClientFactory.clientConnectionLost( + self, connector, reason, reconnecting=1 + ) + protocol.ReconnectingClientFactory.clientConnectionLost( + self, connector, reason + ) def clientConnectionMade(self, broker): log.debug("clientConnectionMade") @@ -96,11 +110,13 @@ def gotPerspective(self, perspective): def gotPerspectiveFailed(self, reason): self._cancelConnectTimeout() - if reason.type == 'twisted.cred.error.UnauthorizedLogin': + if reason.type == "twisted.cred.error.UnauthorizedLogin": log.critical("zenhub username/password combination is incorrect!") # Don't exit as Enterprise caches info and can survive else: - log.critical("Unknown connection problem to zenhub %s", reason.type) + log.critical( + "Unknown connection problem to zenhub %s", reason.type + ) def _gotPerspective(self, perspective): self._cancelConnectTimeout() @@ -117,21 +133,26 @@ def _disconnect(self): try: self.connector.disconnect() except Exception: - log.exception('Could not disconnect') + log.exception("Could not disconnect") else: - log.debug('No connector or broker to disconnect') - + log.debug("No connector or broker to disconnect") + # methods for connecting and login timeout def _startConnectTimeout(self, msg): self._cancelConnectTimeout() - self._scheduledConnectTimeout = reactor.callLater(self._connectTimeout, self._timeoutConnect, msg) + self._scheduledConnectTimeout = reactor.callLater( + self._connectTimeout, self._timeoutConnect, msg + ) def _timeoutConnect(self, msg): log.info("%s timed out after %s seconds", msg, self._connectTimeout) self._disconnect() def _cancelConnectTimeout(self): - self._scheduledConnectTimeout, timeout = None, self._scheduledConnectTimeout + self._scheduledConnectTimeout, timeout = ( + None, + self._scheduledConnectTimeout, + ) if timeout and timeout.active(): log.debug("Cancelling connect timeout") timeout.cancel() @@ -139,8 +160,9 @@ def _cancelConnectTimeout(self): # methods to check connection is active def _startPingTimeout(self): if not self._pingTimeout: - self._pingTimeout = reactor.callLater(self._pingTimeoutTime, - self._doPingTimeout) + self._pingTimeout = reactor.callLater( + self._pingTimeoutTime, self._doPingTimeout + ) def _cancelPingTimeout(self): self._pingTimeout, timeout = None, self._pingTimeout @@ -150,7 +172,10 @@ def _cancelPingTimeout(self): def _doPingTimeout(self): if self._perspective: - log.warn("Perspective ping timed out after %s seconds", self._pingTimeoutTime) + log.warn( + "Perspective ping timed out after %s seconds", + self._pingTimeoutTime, + ) self._disconnect() @defer.inlineCallbacks @@ -170,12 +195,12 @@ def _startPingCycle(self): def _pingPerspective(self): try: if self._perspective: - log.debug('pinging perspective') + log.debug("pinging perspective") 
self._startPingTimeout() - response = yield self._perspective.callRemote('ping') + response = yield self._perspective.callRemote("ping") log.debug("perspective %sed", response) else: - log.debug('skipping perspective ping') + log.debug("skipping perspective ping") self._cancelPingTimeout() except ConnectionClosed: log.info("Connection was closed") @@ -187,6 +212,7 @@ def _pingPerspective(self): def setKeepAlive(sock): """Configure a socket for a longer keep-alive interval.""" import socket + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, OPTION_STATE) sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, CONNECT_TIMEOUT) interval = max(CONNECT_TIMEOUT / 4, 10) diff --git a/Products/ZenUtils/Step.py b/Products/ZenUtils/Step.py old mode 100755 new mode 100644 diff --git a/Products/ZenUtils/Utils.py b/Products/ZenUtils/Utils.py index 38d8e39ee8..64232d5b11 100644 --- a/Products/ZenUtils/Utils.py +++ b/Products/ZenUtils/Utils.py @@ -9,58 +9,64 @@ """Utils -General utility functions module +General utility functions module. """ from __future__ import absolute_import -import sys -import select -import popen2 -import fcntl -import time -import os -import types +import asyncore +import contextlib +import copy import ctypes -import tempfile +import fcntl +import httplib import logging +import math +import os +import popen2 import re +import select +import shlex import socket -import math -import contextlib import string +import sys +import tempfile +import time +import types import xmlrpclib -import httplib -import shlex + from decimal import Decimal -import asyncore -import copy from functools import partial -from decorator import decorator +from popen2 import Popen4 from itertools import chain from subprocess import check_call, call, PIPE, STDOUT, Popen -from ZODB.POSException import ConflictError -from popen2 import Popen4 -from twisted.internet import task, reactor, defer -from Acquisition import aq_base, aq_inner, aq_parent -from zExceptions import NotFound from AccessControl import getSecurityManager, Unauthorized from AccessControl.ZopeGuards import guarded_getattr -from ZServer.HTTPServer import zhttp_channel +from Acquisition import aq_base, aq_inner, aq_parent +from decorator import decorator +from twisted.internet import task, reactor, defer +from zExceptions import NotFound +from ZODB.POSException import ConflictError from zope.i18n import translate from zope.interface import providedBy -from zope.schema import getFields from zope.schema._field import Password +from zope.schema import getFields +from ZServer.HTTPServer import zhttp_channel from .Exceptions import ZenPathError, ZentinelException from .jsonutils import unjson from .Logger import ( # noqa: F401 - HtmlFormatter, setWebLoggingStream, clearWebLoggingStream, setLogLevel, + clearWebLoggingStream, + HtmlFormatter, + setLogLevel, + setWebLoggingStream, ) from .Threading import ( # noqa: F401 - ThreadInterrupt, InterruptableThread, LineReader, + InterruptableThread, + LineReader, + ThreadInterrupt, ) log = logging.getLogger("zen.Utils") @@ -69,21 +75,21 @@ class DictAsObj(object): def __init__(self, **kwargs): - for k,v in kwargs.iteritems(): setattr(self, k, v) + for k, v in kwargs.iteritems(): + setattr(self, k, v) def convToUnits(number=0, divby=1024.0, unitstr="B"): - """ - Convert a number to its human-readable form. ie: 4GB, 4MB, etc. + """Convert a number to its human-readable form. ie: 4GB, 4MB, etc. - >>> convToUnits() # Don't do this! - '0.0B' - >>> convToUnits(None) # Don't do this! 
- '' - >>> convToUnits(123456789) - '117.7MB' - >>> convToUnits(123456789, 1000, "Hz") - '123.5MHz' + >>> convToUnits() # Don't do this! + '0.0B' + >>> convToUnits(None) # Don't do this! + '' + >>> convToUnits(123456789) + '117.7MB' + >>> convToUnits(123456789, 1000, "Hz") + '123.5MHz' @param number: base number @type number: number @@ -94,25 +100,25 @@ def convToUnits(number=0, divby=1024.0, unitstr="B"): @return: number with appropriate units @rtype: string """ - units = map(lambda x:x + unitstr, ('','K','M','G','T','P')) + units = map(lambda x: x + unitstr, ("", "K", "M", "G", "T", "P")) try: numb = float(number) except Exception: - return '' + return "" sign = 1 if numb < 0: numb = abs(numb) sign = -1 for unit in units: - if numb < divby: break + if numb < divby: + break numb /= divby return "%.1f%s" % (numb * sign, unit) def travAndColl(obj, toonerel, collect, collectname): - """ - Walk a series of to one rels collecting collectname into collect + """Walk a series of to one rels collecting collectname into collect. @param obj: object inside of Zope @type obj: object @@ -137,8 +143,8 @@ def travAndColl(obj, toonerel, collect, collectname): def getObjByPath(base, path, restricted=0): - """ - Get a Zope object by its path (e.g. '/Devices/Server/Linux'). + """Get a Zope object by its path (e.g. '/Devices/Server/Linux'). + Mostly a stripdown of unrestrictedTraverse method from Zope 2.8.8. @param base: base part of a path @type base: string @@ -160,13 +166,13 @@ def getObjByPath(base, path, restricted=0): if isinstance(path, str): # Unicode paths are not allowed - path = path.split('/') + path = path.split("/") else: path = list(path) - REQUEST = {'TraversalRequestNameStack': path} + REQUEST = {"TraversalRequestNameStack": path} path.reverse() - path_pop=path.pop + path_pop = path.pop if len(path) > 1 and not path[0]: # Remove trailing slash @@ -181,28 +187,28 @@ def getObjByPath(base, path, restricted=0): # If the path starts with an empty string, go to the root first. path_pop() base = base.getPhysicalRoot() - if (restricted - and not securityManager.validate(None, None, None, base)): - raise Unauthorized( base ) + if restricted and not securityManager.validate(None, None, None, base): + raise Unauthorized(base) obj = base while path: name = path_pop() - if name[0] == '_': + if name[0] == "_": # Never allowed in a URL. - raise NotFound( name ) + raise NotFound(name) - if name == '..': + if name == "..": next = aq_parent(obj) if next is not _none: if restricted and not securityManager.validate( - obj, obj,name, next): - raise Unauthorized( name ) + obj, obj, name, next + ): + raise Unauthorized(name) obj = next continue - bobo_traverse = _getattr(obj, '__bobo_traverse__', _none) + bobo_traverse = _getattr(obj, "__bobo_traverse__", _none) if bobo_traverse is not _none: next = bobo_traverse(REQUEST, name) if restricted: @@ -210,7 +216,7 @@ def getObjByPath(base, path, restricted=0): # The object is wrapped, so the acquisition # context is the container. 
container = aq_parent(aq_inner(next)) - elif _getattr(next, 'im_self', _none) is not _none: + elif _getattr(next, "im_self", _none) is not _none: # Bound method, the bound instance # is the container container = next.im_self @@ -223,7 +229,8 @@ def getObjByPath(base, path, restricted=0): container = _none try: validated = securityManager.validate( - obj, container, name, next) + obj, container, name, next + ) except Unauthorized: # If next is a simple unwrapped property, it's # parentage is indeterminate, but it may have been @@ -231,11 +238,13 @@ def getObjByPath(base, path, restricted=0): # raise an error, and we can explicitly check that # our value was acquired safely. validated = 0 - if container is _none and \ - guarded_getattr(obj, name, marker) is next: + if ( + container is _none + and guarded_getattr(obj, name, marker) is next + ): validated = 1 if not validated: - raise Unauthorized( name ) + raise Unauthorized(name) else: if restricted: next = guarded_getattr(obj, name, marker) @@ -243,20 +252,22 @@ def getObjByPath(base, path, restricted=0): next = _getattr(obj, name, marker) if next is marker: try: - next=obj[name] + next = obj[name] except AttributeError: # Raise NotFound for easier debugging # instead of AttributeError: __getitem__ - raise NotFound( name ) + raise NotFound(name) if restricted and not securityManager.validate( - obj, obj, _none, next): - raise Unauthorized( name ) + obj, obj, _none, next + ): + raise Unauthorized(name) obj = next return obj + def getObjByPath2(base, path, restricted=0): - """ - Get a Zope object by its path (e.g. '/Devices/Server/Linux'). + """Get a Zope object by its path (e.g. '/Devices/Server/Linux'). + Mostly a stripdown of unrestrictedTraverse method from Zope 2.8.8. @param base: base part of a path @@ -277,13 +288,13 @@ def getObjByPath2(base, path, restricted=0): if isinstance(path, str): # Unicode paths are not allowed - path = path.split('/') + path = path.split("/") else: path = list(path) - REQUEST = {'TraversalRequestNameStack': path} + REQUEST = {"TraversalRequestNameStack": path} path.reverse() - path_pop=path.pop + path_pop = path.pop if len(path) > 1 and not path[0]: # Remove trailing slash @@ -298,28 +309,28 @@ def getObjByPath2(base, path, restricted=0): # If the path starts with an empty string, go to the root first. path_pop() base = base.getPhysicalRoot() - if (restricted - and not securityManager.validate(None, None, None, base)): - raise Unauthorized( base ) + if restricted and not securityManager.validate(None, None, None, base): + raise Unauthorized(base) obj = base while path: name = path_pop() - if name[0] == '_': + if name[0] == "_": # Never allowed in a URL. - raise NotFound( name ) + raise NotFound(name) - if name == '..': + if name == "..": next = aq_parent(obj) if next is not _none: if restricted and not securityManager.validate( - obj, obj,name, next): - raise Unauthorized( name ) + obj, obj, name, next + ): + raise Unauthorized(name) obj = next continue - bobo_traverse = _getattr(obj, '__bobo_traverse__', _none) + bobo_traverse = _getattr(obj, "__bobo_traverse__", _none) if bobo_traverse is not _none: next = bobo_traverse(REQUEST, name) if restricted: @@ -327,7 +338,7 @@ def getObjByPath2(base, path, restricted=0): # The object is wrapped, so the acquisition # context is the container. 
container = aq_parent(aq_inner(next)) - elif _getattr(next, 'im_self', _none) is not _none: + elif _getattr(next, "im_self", _none) is not _none: # Bound method, the bound instance # is the container container = next.im_self @@ -340,7 +351,8 @@ def getObjByPath2(base, path, restricted=0): container = _none try: validated = securityManager.validate( - obj, container, name, next) + obj, container, name, next + ) except Unauthorized: # If next is a simple unwrapped property, it's # parentage is indeterminate, but it may have been @@ -348,83 +360,107 @@ def getObjByPath2(base, path, restricted=0): # raise an error, and we can explicitly check that # our value was acquired safely. validated = 0 - if container is _none and \ - guarded_getattr(obj, name, marker) is next: + if ( + container is _none + and guarded_getattr(obj, name, marker) is next + ): validated = 1 if not validated: - raise Unauthorized( name ) + raise Unauthorized(name) else: - next=obj._getOb(name, None) + next = obj._getOb(name, None) if next is None: raise NotFound(name) if restricted and not securityManager.validate( - obj, obj, _none, next): - raise Unauthorized( name ) + obj, obj, _none, next + ): + raise Unauthorized(name) obj = next return obj def capitalizeFirstLetter(s): - #Don't use .title or .capitalize, as those will lower-case a camel-cased type + # Don't use .title or .capitalize because they lower-case camel-cased names return s[0].capitalize() + s[1:] if s else s RENAME_DISPLAY_TYPES = { - 'RRDTemplate': 'Template', - 'ThresholdClass': 'Threshold', - 'HoltWintersFailure': 'Threshold', # see Trac #29376 + "RRDTemplate": "Template", + "ThresholdClass": "Threshold", + "HoltWintersFailure": "Threshold", # see Trac #29376 } + def getDisplayType(obj): - """ - Get a printable string representing the type of this object - """ + """Get a printable string representing the type of this object.""" # TODO: better implementation, like meta_display_type per class. - typename = str(getattr(obj, 'meta_type', None) or obj.__class__.__name__) if obj else 'None' + typename = ( + str(getattr(obj, "meta_type", None) or obj.__class__.__name__) + if obj + else "None" + ) typename = capitalizeFirstLetter(typename) return RENAME_DISPLAY_TYPES.get(typename, typename) def _getName(obj): - return getattr(obj, 'getName', None) or getattr(obj, 'name', None) or \ - getattr(obj, 'Name', None) + return ( + getattr(obj, "getName", None) + or getattr(obj, "name", None) + or getattr(obj, "Name", None) + ) + def _getId(obj): - return getattr(obj, 'getId', None) or getattr(obj, 'id', None) or \ - getattr(obj, 'Id', None) or getattr(obj, 'ID', None) + return ( + getattr(obj, "getId", None) + or getattr(obj, "id", None) + or getattr(obj, "Id", None) + or getattr(obj, "ID", None) + ) + def _getUid(obj): - return getattr(obj, 'getPrimaryId', None) or getattr(obj, 'uid', None) \ - or getattr(obj, 'Uid', None) or getattr(obj, 'UID', None) + return ( + getattr(obj, "getPrimaryId", None) + or getattr(obj, "uid", None) + or getattr(obj, "Uid", None) + or getattr(obj, "UID", None) + ) + def getDisplayName(obj): - """ - Get a printable string representing the name of this object. + """Get a printable string representing the name of this object. + Always returns something but it may not be pretty. """ # TODO: better implementation, like getDisplayName() per class. 
- name = obj.titleOrId() if hasattr(obj, 'titleOrId') else \ - _getName(obj) or _getId(obj) or _getUid(obj) + name = ( + obj.titleOrId() + if hasattr(obj, "titleOrId") + else _getName(obj) or _getId(obj) or _getUid(obj) + ) if name is None: - return str(obj) #we tried our best + return str(obj) # we tried our best return str(name() if callable(name) else name) def getDisplayId(obj): - """ - Get a printable string representing an ID of this object. + """Get a printable string representing an ID of this object. + Always returns something but it may not be pretty. """ # TODO: better implementation, like getDisplayId() per class. dispId = _getUid(obj) or _getId(obj) or _getName(obj) if dispId is None: - return str(obj) #we tried our best - return re.sub(r'^/zport/dmd', '', str(dispId() if callable(dispId) else dispId)) + return str(obj) # we tried our best + return re.sub( + r"^/zport/dmd", "", str(dispId() if callable(dispId) else dispId) + ) def checkClass(myclass, className): - """ - Perform issubclass using class name as string + """Perform issubclass using class name as string. @param myclass: generic object @type myclass: object @@ -441,8 +477,7 @@ def checkClass(myclass, className): def lookupClass(productName, classname=None): - """ - look in sys.modules for our class + """Look in sys.modules for our class. @param productName: object in Products @type productName: string @@ -452,23 +487,22 @@ def lookupClass(productName, classname=None): @rtype: object or None """ if productName in sys.modules: - mod = sys.modules[productName] + mod = sys.modules[productName] - elif "Products."+productName in sys.modules: - mod = sys.modules["Products."+productName] + elif "Products." + productName in sys.modules: + mod = sys.modules["Products." + productName] else: - return None + return None if not classname: - classname = productName.split('.')[-1] + classname = productName.split(".")[-1] - return getattr(mod,classname) + return getattr(mod, classname) def importClass(modulePath, classname=""): - """ - Import a class from the module given. + """Import a class from the module given. @param modulePath: path to module in sys.modules @type modulePath: string @@ -478,7 +512,8 @@ def importClass(modulePath, classname=""): @rtype: class """ try: - if not classname: classname = modulePath.split(".")[-1] + if not classname: + classname = modulePath.split(".")[-1] try: __import__(modulePath, globals(), locals(), classname) mod = sys.modules[modulePath] @@ -487,21 +522,32 @@ def importClass(modulePath, classname=""): return getattr(mod, classname) except AttributeError: - raise ImportError("Failed while importing class %s from module %s" % ( - classname, modulePath)) + raise ImportError( + "Failed while importing class %s from module %s" + % (classname, modulePath) + ) def cleanstring(value): - """ - Take the trailing \x00 off the end of a string + """Take the trailing \x00 off the end of a string. 
+ + >>> txt = 'clean' + >>> cleanstring(txt) == txt + True + >>> cleanstring(txt + chr(0)) == txt + True + >>> cleanstring(txt + chr(0) + chr(0)) == txt + True @param value: sample string @type value: string @return: cleaned string @rtype: string """ - if isinstance(value, basestring) and value.endswith('\0'): - value = value[:-1] + if isinstance(value, basestring): + offset = value.find("\0") + if offset >= 0: + value = value[:offset] return value @@ -513,16 +559,19 @@ def getSubObjects(base, filter=None, descend=None, retobjs=None): @param base: base object to start search @type base: object - @param filter: filter to apply to each object to determine if it gets added to the returned list + @param filter: filter to apply to each object to determine if it gets + added to the returned list. @type filter: function or None - @param descend: function to apply to each object to determine whether or not to continue searching + @param descend: function to apply to each object to determine whether or + not to continue searching. @type descend: function or None @param retobjs: list of objects found @type retobjs: list @return: list of objects found @rtype: list """ - if not retobjs: retobjs = [] + if not retobjs: + retobjs = [] for obj in base.objectValues(): if not filter or filter(obj): retobjs.append(obj) @@ -541,24 +590,27 @@ def getSubObjectsMemo(base, filter=None, descend=None, memo={}): @param base: base object to start search @type base: object - @param filter: filter to apply to each object to determine if it gets added to the returned list + @param filter: filter to apply to each object to determine if it gets + added to the returned list. @type filter: function or None - @param descend: function to apply to each object to determine whether or not to continue searching + @param descend: function to apply to each object to determine whether or + not to continue searching. @type descend: function or None @param memo: dictionary of objects found (unused) @type memo: dictionary @return: list of objects found @rtype: list """ - from Products.ZenRelations.RelationshipManager \ - import RelationshipManager + from Products.ZenRelations.RelationshipManager import RelationshipManager + if base.meta_type == "To One Relationship": objs = [base.obj] else: objs = base.objectValues() for obj in objs: - if (isinstance(obj, RelationshipManager) and - not obj.getPrimaryDmdId().startswith(base.getPrimaryDmdId())): + if isinstance( + obj, RelationshipManager + ) and not obj.getPrimaryDmdId().startswith(base.getPrimaryDmdId()): continue if not filter or filter(obj): yield obj @@ -568,8 +620,7 @@ def getSubObjectsMemo(base, filter=None, descend=None, memo={}): def getAllConfmonObjects(base): - """ - Get all ZenModelRM objects in database + """Get all ZenModelRM objects in database. 
@param base: base object to start searching @type base: object @@ -578,26 +629,26 @@ def getAllConfmonObjects(base): """ from Products.ZenModel.ZenModelRM import ZenModelRM from Products.ZenModel.ZenModelBase import ZenModelBase - from Products.ZenRelations.ToManyContRelationship \ - import ToManyContRelationship - from Products.ZenRelations.ToManyRelationship \ - import ToManyRelationship - from Products.ZenRelations.ToOneRelationship \ - import ToOneRelationship + from Products.ZenRelations.ToManyContRelationship import ( + ToManyContRelationship, + ) + from Products.ZenRelations.ToManyRelationship import ToManyRelationship + from Products.ZenRelations.ToOneRelationship import ToOneRelationship def descend(obj): - """ - Function to determine whether or not to continue searching + """Function to determine whether or not to continue searching. + @param obj: object @type obj: object @return: True if we want to keep searching @rtype: boolean """ return ( - isinstance(obj, ZenModelBase) or - isinstance(obj, ToManyContRelationship) or - isinstance(obj, ToManyRelationship) or - isinstance(obj, ToOneRelationship)) + isinstance(obj, ZenModelBase) + or isinstance(obj, ToManyContRelationship) + or isinstance(obj, ToManyRelationship) + or isinstance(obj, ToOneRelationship) + ) def filter(obj): """ @@ -615,8 +666,12 @@ def filter(obj): def zenpathsplit(pathstring): - """ - Split a zen path and clean up any blanks or bogus spaces in it + """Return the parts of a path with extraneous spaces removed. + + >>> zenpathsplit('/zport/dmd/Devices') + ['zport', 'dmd', 'Devices'] + >>> zenpathsplit(' a /b / c') + ['a', 'b', 'c'] @param pathstring: a path inside of ZENHOME @type pathstring: string @@ -629,17 +684,23 @@ def zenpathsplit(pathstring): return path - def zenpathjoin(pathar): - """ - Build a zenpath in its string form + """Return a string that is the path formed from its parts. + + The returned path is always an absolute path. + + >>> zenpathjoin(('zport', 'dmd', 'Devices', 'Server')) + '/zport/dmd/Devices/Server' + >>> zenpathjoin(('', 'zport', 'dmd', 'Devices', 'Server')) + '/zport/dmd/Devices/Server' @param pathar: a path @type pathar: string @return: a path @rtype: string """ - return "/" + "/".join(pathar) + path = "/".join(pathar) + return path if path.startswith("/") else "/" + path def createHierarchyObj(root, name, factory, relpath="", llog=None): @@ -654,7 +715,8 @@ def createHierarchyObj(root, name, factory, relpath="", llog=None): @type name: string @param factory: factory object to create @type factory: factory object - @param relpath: relationship within which we will recurse as objects are created, if any + @param relpath: relationship within which we will recurse as objects are + created, if any. 
@type relpath: object @param llog: unused @type llog: object @@ -664,13 +726,16 @@ def createHierarchyObj(root, name, factory, relpath="", llog=None): unused(llog) rootName = root.id for id in zenpathsplit(name): - if id == rootName: continue + if id == rootName: + continue if id == relpath or getattr(aq_base(root), relpath, False): root = getattr(root, relpath) if not getattr(aq_base(root), id, False): if id == relpath: raise AttributeError("relpath %s not found" % relpath) - log.debug("Creating object with id %s in object %s", id, root.getId()) + log.debug( + "Creating object with id %s in object %s", id, root.getId() + ) newobj = factory(id) root._setObject(id, newobj) root = getattr(root, id) @@ -679,14 +744,14 @@ def createHierarchyObj(root, name, factory, relpath="", llog=None): def getHierarchyObj(root, name, relpath=None): - """ - Return an object using its path relations are optional in the path. + """Return an object using its path relations are optional in the path. @param root: root from which to start @type root: object @param name: path to object @type name: string - @param relpath: relationship within which we will recurse as objects are created, if any + @param relpath: relationship within which we will recurse as objects are + created, if any. @type relpath: object @return: root object of a hierarchy @rtype: object @@ -695,14 +760,15 @@ def getHierarchyObj(root, name, relpath=None): if id == relpath or getattr(aq_base(root), relpath, False): root = getattr(root, relpath) if not getattr(root, id, False): - raise ZenPathError("Path %s id %s not found on object %s" % - (name, id, root.getPrimaryId())) + raise ZenPathError( + "Path %s id %s not found on object %s" + % (name, id, root.getPrimaryId()) + ) root = getattr(root, id, None) return root - def basicAuthUrl(username, password, url): """ Add the username and password to a url in the form @@ -717,16 +783,15 @@ def basicAuthUrl(username, password, url): @return: URL with auth information incorporated @rtype: string """ - urlar = url.split('/') - if not username or not password or urlar[2].find('@') > -1: + urlar = url.split("/") + if not username or not password or urlar[2].find("@") > -1: return url urlar[2] = "%s:%s@%s" % (username, password, urlar[2]) return "/".join(urlar) - -def prepId(id, subchar='_'): - """ +def prepId(id, subchar="_"): + r""" Make an id with valid url characters. Subs [^a-zA-Z0-9-_,.$\(\) ] with subchar. If id then starts with subchar it is removed. @@ -735,22 +800,24 @@ def prepId(id, subchar='_'): @return: valid id @rtype: string """ - _prepId = re.compile(r'[^a-zA-Z0-9-_,.$\(\) ]').sub + _prepId = re.compile(r"[^a-zA-Z0-9-_,.$\(\) ]").sub _cleanend = re.compile(r"%s+$" % subchar).sub if id is None: - raise ValueError('Ids can not be None') + raise ValueError("Ids can not be None") if not isinstance(id, basestring): id = str(id) id = _prepId(subchar, id) while id.startswith(subchar): - if len(id) > 1: id = id[1:] - else: id = "-" - id = _cleanend("",id) - id = id.lstrip(string.whitespace + '_').rstrip() + if len(id) > 1: + id = id[1:] + else: + id = "-" + id = _cleanend("", id) + id = id.lstrip(string.whitespace + "_").rstrip() return str(id) -def sendEmail(emsg, host, port=25, usetls=0, usr='', pwd=''): +def sendEmail(emsg, host, port=25, usetls=0, usr="", pwd=""): """ Send an email. Return a tuple: (sucess, message) where sucess is True or False. 
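A short usage sketch for prepId as reformatted in the hunk above. This is illustrative only; the behaviour is read off the function body, and the import from Products.ZenUtils.Utils is assumed.

# Illustrative sketch (not part of this diff).
from Products.ZenUtils.Utils import prepId

print(prepId("my device?"))        # "?" becomes "_", then the trailing "_" is stripped: "my device"
print(prepId("___node-1"))         # leading underscores are peeled off one at a time: "node-1"
print(prepId(1234))                # non-strings are coerced with str(): "1234"
print(prepId("a/b", subchar="-"))  # the disallowed "/" becomes the supplied subchar: "a-b"
# prepId(None) raises ValueError("Ids can not be None")
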
@@ -771,24 +838,28 @@ def sendEmail(emsg, host, port=25, usetls=0, usr='', pwd=''): @rtype: tuple """ import smtplib - fromaddr = emsg['From'] - toaddr = map(lambda x: x.strip(), emsg['To'].split(',')) + + fromaddr = emsg["From"] + toaddr = map(lambda x: x.strip(), emsg["To"].split(",")) try: server = smtplib.SMTP(host, port, timeout=DEFAULT_SOCKET_TIMEOUT) if usetls: server.ehlo() server.starttls() server.ehlo() - if len(usr): server.login(usr, pwd) + if len(usr): + server.login(usr, pwd) server.sendmail(fromaddr, toaddr, emsg.as_string()) # Need to catch the quit because some servers using TLS throw an # EOF error on quit, so the email gets sent over and over - try: server.quit() - except Exception: pass + try: + server.quit() + except Exception: + pass except (smtplib.SMTPException, socket.error, socket.timeout): - result = (False, '%s - %s' % tuple(sys.exc_info()[:2])) + result = (False, "%s - %s" % tuple(sys.exc_info()[:2])) else: - result = (True, '') + result = (True, "") return result @@ -807,14 +878,17 @@ def sendPage(recipient, msg, pageCommand, deferred=False): @rtype: tuple """ import subprocess + env = dict(os.environ) env["RECIPIENT"] = recipient msg = str(msg) - p = subprocess.Popen(pageCommand, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - shell=True, - env=env) + p = subprocess.Popen( + pageCommand, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + shell=True, + env=env, + ) p.stdin.write(msg) p.stdin.close() response = p.stdout.read() @@ -822,8 +896,7 @@ def sendPage(recipient, msg, pageCommand, deferred=False): def zdecode(context, value): - """ - Convert a string using the decoding found in zCollectorDecoding + """Convert a string using the decoding found in zCollectorDecoding. @param context: Zope object @type context: object @@ -833,14 +906,14 @@ def zdecode(context, value): @rtype: string """ if isinstance(value, str): - decoding = getattr(context, 'zCollectorDecoding', 'utf-8') + decoding = getattr(context, "zCollectorDecoding", "utf-8") value = value.decode(decoding) return value def localIpCheck(context, ip): - """ - Test to see if an IP should not be included in the network map. + """Test to see if an IP should not be included in the network map. + Uses the zLocalIpAddresses to decide. @param context: Zope object @@ -850,11 +923,12 @@ def localIpCheck(context, ip): @return: regular expression match or None (if not found) @rtype: re match object """ - return re.search(getattr(context, 'zLocalIpAddresses', '^$'), ip) + return re.search(getattr(context, "zLocalIpAddresses", "^$"), ip) + def localInterfaceCheck(context, intname): - """ - Test to see if an interface should not be included in the network map. + """Test to see if an interface should not be included in the network map. + Uses the zLocalInterfaceNames to decide. @param context: Zope object @@ -864,14 +938,13 @@ def localInterfaceCheck(context, intname): @return: regular expression match or None (if not found) @rtype: re match object """ - return re.search(getattr(context, 'zLocalInterfaceNames', '^$'), intname) + return re.search(getattr(context, "zLocalInterfaceNames", "^$"), intname) def cmpClassNames(obj, classnames): """ - Check to see if any of an object's base classes - are in a list of class names. Like isinstance(), - but without requiring a class to compare against. + Check to see if any of an object's base classes are in a list of class + names. Like isinstance(), but without requiring a class to compare against. 
@param obj: object @type obj: object @@ -886,12 +959,11 @@ def cmpClassNames(obj, classnames): thisclass = x.pop() x.extend(thisclass.__bases__) finalnames.add(thisclass.__name__) - return bool( set(classnames).intersection(finalnames) ) + return bool(set(classnames).intersection(finalnames)) def resequence(context, objects, seqmap, origseq, REQUEST): - """ - Resequence a seqmap + """Resequence a seqmap. @param context: Zope object @type context: object @@ -925,21 +997,19 @@ def resequence(context, objects, seqmap, origseq, REQUEST): def cleanupSkins(dmd): - """ - Prune out objects + """Prune out objects. @param dmd: Device Management Database @type dmd: DMD object """ ps = dmd.getPhysicalRoot().zport.portal_skins layers = ps._objects - layers = filter(lambda x:getattr(ps, x['id'], False), layers) + layers = filter(lambda x: getattr(ps, x["id"], False), layers) ps._objects = tuple(layers) def edgesToXML(edges, start=()): - """ - Convert edges to an XML file + """Convert edges to an XML file. @param edges: edges @type edges: list @@ -957,73 +1027,126 @@ def edgesToXML(edges, start=()): node1 = nodet % (a_id, a_title, a_icon_path, a_color) node2 = nodet % (b_id, b_title, b_icon_path, b_color) edge1 = edget % (a_title, b_id) - if node1 not in nodeels: nodeels.append(node1) - if node2 not in nodeels: nodeels.append(node2) - if edge1 not in edgeels: edgeels.append(edge1) + if node1 not in nodeels: + nodeels.append(node1) + if node2 not in nodeels: + nodeels.append(node2) + if edge1 not in edgeels: + edgeels.append(edge1) xmlels.extend(nodeels) xmlels.extend(edgeels) - xmldoc = "%s" % ''.join(list(xmlels)) + xmldoc = "%s" % "".join(list(xmlels)) return xmldoc -def sane_pathjoin(base_path, *args ): - """ - Joins paths in a saner manner than os.path.join() - - @param base_path: base path to assume everything is rooted from +def _normalize_path(path): + """Return the given path sans extraneous spaces and slashes. + + Trailing slashes are removed. + + >>> _normalize_path('a') + 'a' + >>> _normalize_path('/a') + '/a' + >>> _normalize_path('/a/b/') + '/a/b' + >>> _normalize_path('a/b/') + 'a/b' + >>> _normalize_path('a//b/') + 'a/b' + >>> _normalize_path('//a//b/') + '/a/b' + >>> _normalize_path(' / a / b / ') + '/a/b' + >>> _normalize_path(' a / b / ') + 'a/b' + >>> _normalize_path(' a / b ') + 'a/b' + """ + # removes leading/trailing spaces from path parts and removes path parts + # that are empty strings. + parts = [p.strip() for p in path.split("/")] + # Never eliminate the first part + return "/".join(parts[0:1] + [p for p in parts[1:] if p]) + + +def sane_pathjoin(base_path, *args): + """Returns a path string constructed from the arguments. + + The first argument ('base_path') is always the root part of the path. + This differs from os.path.join's behavior of discarding earlier path + parts if later path parts have a leading slash. + + The base_path and *args are two paths to be joined. If the left-most + parts of *args matches base_path, only the parts after the match are + used in the resulting path. 
+ + >>> sane_pathjoin('a') + 'a' + >>> sane_pathjoin('/a') + '/a' + >>> sane_pathjoin('/a', 'b', 'c') + '/a/b/c' + >>> sane_pathjoin('/a', '/b', '/c') + '/a/b/c' + >>> sane_pathjoin('/a', 'b', '/c') + '/a/b/c' + >>> sane_pathjoin('a', 'b', 'c') + 'a/b/c' + >>> sane_pathjoin('a', '/b', '/c') + 'a/b/c' + >>> sane_pathjoin('a', 'b', '/c') + 'a/b/c' + >>> sane_pathjoin('a', '') + 'a' + >>> sane_pathjoin('a', '', 'b') + 'a/b' + >>> sane_pathjoin('/a ', ' b ', '/ c', '/d ') + '/a/b/c/d' + >>> sane_pathjoin('/a/b', '/a/b', 'c') + '/a/b/c' + >>> sane_pathjoin('/a/b', 'a', 'b', 'c') + '/a/b/c' + >>> sane_pathjoin('a/b', '/a/b', 'c') + 'a/b/c' + >>> sane_pathjoin('a/b', 'a', 'b', 'c') + 'a/b/c' + + @param base_path: Base path to assume everything is rooted from. @type base_path: string - @param *args: path components starting from $ZENHOME - @type *args: strings + @param *args: Path parts that follow base_path. + @type *args: Sequence of strings @return: sanitized path @rtype: string """ - path = base_path - if args: - # Hugely bizarre (but documented!) behaviour with os.path.join() - # >>> import os.path - # >>> os.path.join( '/blue', 'green' ) - # '/blue/green' - # >>> os.path.join( '/blue', '/green' ) - # '/green' - # Work around the brain damage... - base = args[0] - if base.startswith( base_path ): - path_args = [ base ] + [a.strip('/') for a in args[1:] if a != '' ] - else: - path_args = [a.strip('/') for a in args if a != '' ] - - # Empty strings get thrown out so we may not have anything - if len(path_args) > 0: - # What if the user splits up base_path and passes it in? - pathological_case = os.path.join( *path_args ) - if pathological_case.startswith( base_path ): - pass - - elif not base.startswith( base_path ): - path_args.insert( 0, base_path ) - - # Note: passing in a list to os.path.join() returns a list, - # again completely unlike string join() - path = os.path.join( *path_args ) - - # os.path.join( '/blue', '' ) returns '/blue/' -- egads! - return path.rstrip('/') + root = _normalize_path(base_path) + subpath = _normalize_path("/".join(args)) + if subpath: + # subpath should always be a relative path. + if subpath[0] == "/": + subpath = subpath[1:] + # Get a relative path from the root path. + relbase = root[1:] if root[0:1] == "/" else root + if relbase and subpath.startswith(relbase): + subpath = subpath[len(relbase) + 1 :] + return "/".join((root, subpath)) + return root def varPath(*args): + """Return a path relative to /var/zenoss specified by joining args. + + The path is not guaranteed to exist on the filesystem. """ - Return a path relative to /var/zenoss specified by joining args. As with - zenPath(), the path is not guaranteed to exist on the filesystem. - """ - return sane_pathjoin('/var/zenoss', *args) + return sane_pathjoin("/var/zenoss", *args) def zenPath(*args): - """ - Return a path relative to $ZENHOME specified by joining args. The path - is not guaranteed to exist on the filesystem. + """Return a path relative to $ZENHOME specified by joining args. + + The path is not guaranteed to exist on the filesystem. 
>>> import os >>> zenHome = os.environ['ZENHOME'] @@ -1044,7 +1167,8 @@ def zenPath(*args): True >>> zenPath(zenPath('Products')) == zenPath('Products') True - >>> zenPath(zenPath('Products'), 'orange', 'blue' ) == zenPath('Products', 'orange', 'blue' ) + >>> zenPath(zenPath('Products'), 'orange', 'blue' ) \ + == zenPath('Products', 'orange', 'blue' ) True # Pathological case @@ -1054,16 +1178,17 @@ def zenPath(*args): @param *args: path components starting from $ZENHOME @type *args: strings - @todo: determine what the correct behaviour should be if $ZENHOME is a symlink! + @todo: determine what the correct behaviour should be if $ZENHOME + is a symlink! """ - zenhome = os.environ.get( 'ZENHOME', '' ) + zenhome = os.environ.get("ZENHOME", "") - path = sane_pathjoin( zenhome, *args ) + path = sane_pathjoin(zenhome, *args) - #test if ZENHOME based path exists and if not try bitrock-style path. - #if neither exists return the ZENHOME-based path + # test if ZENHOME based path exists and if not try bitrock-style path. + # if neither exists return the ZENHOME-based path if not os.path.exists(path): - brPath = os.path.realpath(os.path.join(zenhome, '..', 'common')) + brPath = os.path.realpath(os.path.join(zenhome, "..", "common")) testPath = sane_pathjoin(brPath, *args) if os.path.exists(testPath): path = testPath @@ -1088,8 +1213,8 @@ def zopePath(*args): @param *args: path components starting from $ZOPEHOME @type *args: strings """ - zopehome = os.environ.get('ZOPEHOME', '') - return sane_pathjoin( zopehome, *args ) + zopehome = os.environ.get("ZOPEHOME", "") + return sane_pathjoin(zopehome, *args) def binPath(fileName): @@ -1110,17 +1235,21 @@ def binPath(fileName): @rtype: string """ # bin and libexec are the usual suspect locations - paths = [zenPath(d, fileName) for d in ('bin', 'libexec')] + paths = [zenPath(d, fileName) for d in ("bin", "libexec")] # $ZOPEHOME/bin is an additional option for appliance - paths.append(zopePath('bin', fileName)) - # also check the standard locations for Nagios plugins (/usr/lib(64)/nagios/plugins) - paths.extend(sane_pathjoin(d, fileName) for d in ('/usr/lib/nagios/plugins', - '/usr/lib64/nagios/plugins')) + paths.append(zopePath("bin", fileName)) + # Also check the standard locations for Nagios plugins + # (/usr/lib(64)/nagios/plugins) + paths.extend( + sane_pathjoin(d, fileName) + for d in ("/usr/lib/nagios/plugins", "/usr/lib64/nagios/plugins") + ) for path in paths: if os.path.isfile(path): return path - return '' + return "" + def extractPostContent(REQUEST): """ @@ -1140,39 +1269,36 @@ def extractPostContent(REQUEST): # IE return REQUEST.form.keys()[0] except Exception: - return '' + return "" def unused(*args): - """ - A no-op function useful for shutting up pychecker + """A no-op function useful for shutting up pychecker. @param *args: arbitrary arguments @type *args: objects @return: count of the objects @rtype: integer """ - return len(args) + pass def isXmlRpc(REQUEST): - """ - Did we receive a XML-RPC call? + """Did we receive a XML-RPC call? @param REQUEST: Zope REQUEST object @type REQUEST: Zope REQUEST object @return: True if REQUEST is an XML-RPC call @rtype: boolean """ - if REQUEST and REQUEST['CONTENT_TYPE'].find('xml') > -1: + if REQUEST and REQUEST["CONTENT_TYPE"].find("xml") > -1: return True else: return False def setupLoggingHeader(context, REQUEST): - """ - Extract out the 2nd outermost table + """Extract out the 2nd outermost table. 
@param context: Zope object @type context: Zope object @@ -1193,8 +1319,7 @@ def setupLoggingHeader(context, REQUEST): def executeCommand(cmd, REQUEST, write=None): - """ - Execute the command and return the output + """Execute the command and return the output. @param cmd: command to execute @type cmd: string @@ -1211,11 +1336,13 @@ def executeCommand(cmd, REQUEST, write=None): else: response = sys.stdout if write is None: + def _write(s): response.write(s) response.flush() + write = _write - log.info('Executing command: %s', ' '.join(cmd)) + log.info("Executing command: %s", " ".join(cmd)) f = Popen4(cmd) while 1: s = f.fromchild.readline() @@ -1226,20 +1353,22 @@ def _write(s): else: log.info(s) except ZentinelException as e: - if xmlrpc: return 1 + if xmlrpc: + return 1 log.critical(e) except Exception: - if xmlrpc: return 1 + if xmlrpc: + return 1 raise else: result = f.wait() - result = int(hex(result)[:-2], 16) + result = int(hex(result)[2:], 16) return result def ipsort(a, b): - """ - Compare (cmp()) a + b's IP addresses + """Compare (cmp()) a + b's IP addresses. + These addresses may contain subnet mask info. @param a: IP address @@ -1250,16 +1379,19 @@ def ipsort(a, b): @rtype: boolean """ # Use 0.0.0.0 instead of blank string - if not a: a = "0.0.0.0" - if not b: b = "0.0.0.0" + if not a: + a = "0.0.0.0" + if not b: + b = "0.0.0.0" # Strip off netmasks - a, b = map(lambda x:x.rsplit("/")[0], (a, b)) + a, b = map(lambda x: x.rsplit("/")[0], (a, b)) return cmp(*map(socket.inet_aton, (a, b))) + def ipsortKey(a): - """ - Key function to replace cmp version of ipsort + """Key function to replace cmp version of ipsort. + @param a: IP address @type a: string @return: result of socket.inet_aton(a.ip) @@ -1267,12 +1399,12 @@ def ipsortKey(a): """ if not a: a = "0.0.0.0" - a = a.rsplit('/')[0] + a = a.rsplit("/")[0] return socket.inet_aton(a) + def unsigned(v): - """ - Convert negative 32-bit values into the 2's complement unsigned value + """Convert negative 32-bit values into the 2's complement unsigned value. >>> str(unsigned(-1)) '4294967295' @@ -1304,8 +1436,7 @@ def nanToNone(value): def executeStreamCommand(cmd, writefunc, timeout=30): - """ - Execute cmd in the shell and send the output to writefunc. + """Execute cmd in the shell and send the output to writefunc. @param cmd: command to execute @type cmd: string @@ -1320,46 +1451,45 @@ def executeStreamCommand(cmd, writefunc, timeout=30): pollPeriod = 1 endtime = time.time() + timeout firstPass = True - while time.time() < endtime and ( - firstPass or child.poll()==-1): + while time.time() < endtime and (firstPass or child.poll() == -1): firstPass = False - r,w,e = select.select([child.fromchild],[],[],pollPeriod) + r, w, e = select.select([child.fromchild], [], [], pollPeriod) if r: t = child.fromchild.read() if t: writefunc(t) - if child.poll()==-1: - writefunc('Command timed out') + if child.poll() == -1: + writefunc("Command timed out") import signal + os.kill(child.pid, signal.SIGKILL) def monkeypatch(target): - """ - A decorator to patch the decorated function into the given class. - - >>> @monkeypatch('Products.ZenModel.DataRoot.DataRoot') - ... def do_nothing_at_all(self): - ... print "I do nothing at all." - ... - >>> from Products.ZenModel.DataRoot import DataRoot - >>> hasattr(DataRoot, 'do_nothing_at_all') - True - >>> DataRoot('dummy').do_nothing_at_all() - I do nothing at all. + """A decorator to patch the decorated function into the given class. + + >>> @monkeypatch('Products.ZenModel.DataRoot.DataRoot') + ... 
def do_nothing_at_all(self): + ... print "I do nothing at all." + ... + >>> from Products.ZenModel.DataRoot import DataRoot + >>> hasattr(DataRoot, 'do_nothing_at_all') + True + >>> DataRoot('dummy').do_nothing_at_all() + I do nothing at all. You can also call the original within the new method using a special variable available only locally. - >>> @monkeypatch('Products.ZenModel.DataRoot.DataRoot') - ... def getProductName(self): - ... print "Doing something additional." - ... return 'core' or original(self) - ... - >>> from Products.ZenModel.DataRoot import DataRoot - >>> DataRoot('dummy').getProductName() - Doing something additional. - 'core' + >>> @monkeypatch('Products.ZenModel.DataRoot.DataRoot') + ... def getProductName(self): + ... print "Doing something additional." + ... return 'core' or original(self) + ... + >>> from Products.ZenModel.DataRoot import DataRoot + >>> DataRoot('dummy').getProductName() + Doing something additional. + 'core' You can also stack monkeypatches. @@ -1382,8 +1512,9 @@ def monkeypatch(target): @rtype: function """ if isinstance(target, basestring): - mod, klass = target.rsplit('.', 1) + mod, klass = target.rsplit(".", 1) target = importClass(mod, klass) + def patcher(func): original = getattr(target, func.__name__, None) if original is None: @@ -1391,19 +1522,22 @@ def patcher(func): return func new_globals = copy.copy(func.func_globals) - new_globals['original'] = original - new_func = types.FunctionType(func.func_code, - globals=new_globals, - name=func.func_name, - argdefs=func.func_defaults, - closure=func.func_closure) + new_globals["original"] = original + new_func = types.FunctionType( + func.func_code, + globals=new_globals, + name=func.func_name, + argdefs=func.func_defaults, + closure=func.func_closure, + ) setattr(target, func.__name__, new_func) return func + return patcher + def nocache(f): - """ - Decorator to set headers which force browser to not cache request + """Decorator to set headers which force browser to not cache request. This is intended to decorate methods of BrowserViews. @@ -1412,9 +1546,9 @@ def nocache(f): @return: decorator function return @rtype: function """ + def inner(self, *args, **kwargs): - """ - Inner portion of the decorator + """Inner portion of the decorator. @param *args: arguments @type *args: possible list @@ -1423,18 +1557,22 @@ def inner(self, *args, **kwargs): @return: decorator function return @rtype: function """ - self.request.response.setHeader('Cache-Control', 'no-cache, must-revalidate') - self.request.response.setHeader('Pragma', 'no-cache') - self.request.response.setHeader('Expires', 'Sat, 13 May 2006 18:02:00 GMT') + self.request.response.setHeader( + "Cache-Control", "no-cache, must-revalidate" + ) + self.request.response.setHeader("Pragma", "no-cache") + self.request.response.setHeader( + "Expires", "Sat, 13 May 2006 18:02:00 GMT" + ) # Get rid of kw used to prevent browser caching - kwargs.pop('_dc', None) + kwargs.pop("_dc", None) return f(self, *args, **kwargs) return inner + def formreq(f): - """ - Decorator to pass in request.form information as arguments to a method. + """Decorator to pass in request.form information as arguments to a method. These are intended to decorate methods of BrowserViews. @@ -1443,9 +1581,9 @@ def formreq(f): @return: decorator function return @rtype: function """ + def inner(self, *args, **kwargs): - """ - Inner portion of the decorator + """Inner portion of the decorator. 
@param *args: arguments @type *args: possible list @@ -1454,7 +1592,7 @@ def inner(self, *args, **kwargs): @return: decorator function return @rtype: function """ - if self.request.REQUEST_METHOD=='POST': + if self.request.REQUEST_METHOD == "POST": content = extractPostContent(self.request) try: args += (unjson(content),) @@ -1463,9 +1601,9 @@ def inner(self, *args, **kwargs): else: kwargs.update(self.request.form) # Get rid of useless Zope thing that appears when no querystring - kwargs.pop('-C', None) + kwargs.pop("-C", None) # Get rid of kw used to prevent browser caching - kwargs.pop('_dc', None) + kwargs.pop("_dc", None) return f(self, *args, **kwargs) return inner @@ -1479,20 +1617,21 @@ class Singleton(type): of the class itself, then checking that attribute for later constructor calls. """ + def __init__(cls, *args, **kwargs): super(Singleton, cls).__init__(*args, **kwargs) cls._singleton_instance = None def __call__(cls, *args, **kwargs): if cls._singleton_instance is None: - cls._singleton_instance = super( - Singleton, cls).__call__(*args, **kwargs) + cls._singleton_instance = super(Singleton, cls).__call__( + *args, **kwargs + ) return cls._singleton_instance def readable_time(seconds, precision=1): - """ - Convert some number of seconds into a human-readable string. + """Convert some number of seconds into a human-readable string. @param seconds: The number of seconds to convert @type seconds: int @@ -1500,42 +1639,49 @@ def readable_time(seconds, precision=1): @type precision: int @rtype: str - >>> readable_time(None) - '0 seconds' - >>> readable_time(0) - '0 seconds' - >>> readable_time(0.12) - '0 seconds' - >>> readable_time(1) - '1 second' - >>> readable_time(1.5) - '1 second' - >>> readable_time(60) - '1 minute' - >>> readable_time(60*60*3+12) - '3 hours' - >>> readable_time(60*60*3+12, 2) - '3 hours 12 seconds' - + >>> readable_time(None) + '0 seconds' + >>> readable_time(0) + '0 seconds' + >>> readable_time(0.12) + '0 seconds' + >>> readable_time(1) + '1 second' + >>> readable_time(1.5) + '1 second' + >>> readable_time(60) + '1 minute' + >>> readable_time(60*60*3+12) + '3 hours' + >>> readable_time(60*60*3+12, 2) + '3 hours 12 seconds' """ if seconds is None: - return '0 seconds' + return "0 seconds" remaining = abs(seconds) if remaining < 1: - return '0 seconds' - - names = ('year', 'month', 'week', 'day', 'hour', 'minute', 'second') - mults = (60*60*24*365, 60*60*24*30, 60*60*24*7, 60*60*24, 60*60, 60, 1) + return "0 seconds" + + names = ("year", "month", "week", "day", "hour", "minute", "second") + mults = ( + 60 * 60 * 24 * 365, + 60 * 60 * 24 * 30, + 60 * 60 * 24 * 7, + 60 * 60 * 24, + 60 * 60, + 60, + 1, + ) result = [] for name, div in zip(names, mults): - num = Decimal(str(math.floor(remaining/div))) - remaining -= int(num)*div + num = Decimal(str(math.floor(remaining / div))) + remaining -= int(num) * div num = int(num) if num: - result.append('%d %s%s' %(num, name, num>1 and 's' or '')) - if len(result)==precision: + result.append("%d %s%s" % (num, name, num > 1 and "s" or "")) + if len(result) == precision: break - return ' '.join(result) + return " ".join(result) def relative_time(t, precision=1, cmptime=None): @@ -1552,32 +1698,30 @@ def relative_time(t, precision=1, cmptime=None): @type cmptime: int @rtype: str - >>> relative_time(time.time() - 60*10) - '10 minutes ago' - >>> relative_time(time.time() - 60*10-3, precision=2) - '10 minutes 3 seconds ago' - >>> relative_time(time.time() - 60*60*24*10, precision=2) - '1 week 3 days ago' - >>> 
relative_time(time.time() - 60*60*24*365-1, precision=2) - '1 year 1 second ago' - >>> relative_time(time.time() + 1 + 60*60*24*7*2) # Add 1 for rounding - 'in 2 weeks' - + >>> relative_time(time.time() - 60*10) + '10 minutes ago' + >>> relative_time(time.time() - 60*10-3, precision=2) + '10 minutes 3 seconds ago' + >>> relative_time(time.time() - 60*60*24*10, precision=2) + '1 week 3 days ago' + >>> relative_time(time.time() - 60*60*24*365-1, precision=2) + '1 year 1 second ago' + >>> relative_time(time.time() + 1 + 60*60*24*7*2) # Add 1 for rounding + 'in 2 weeks' """ if cmptime is None: cmptime = time.time() seconds = Decimal(str(t - cmptime)) result = readable_time(seconds, precision) if seconds < 0: - result += ' ago' + result += " ago" else: - result = 'in ' + result + result = "in " + result return result def is_browser_connection_open(request): - """ - Check to see if the TCP connection to the browser is still open. + """Check to see if the TCP connection to the browser is still open. This might be used to interrupt an infinite while loop, which would preclude the thread from being destroyed even though the connection has @@ -1596,15 +1740,22 @@ def is_browser_connection_open(request): EXIT_CODE_MAPPING = { - 0:'Success', - 1:'General error', - 2:'Misuse of shell builtins', - 126:'Command invoked cannot execute, permissions problem or command is not an executable', - 127:'Command not found', - 128:'Invalid argument to exit, exit takes only integers in the range 0-255', - 130:'Fatal error signal: 2, Command terminated by Control-C' + 0: "Success", + 1: "General error", + 2: "Misuse of shell builtins", + 126: ( + "Command invoked cannot execute, permissions problem or command " + "is not an executable" + ), + 127: "Command not found", + 128: ( + "Invalid argument to exit, exit takes only integers in the " + "range 0-255" + ), + 130: "Fatal error signal: 2, Command terminated by Control-C", } + def getExitMessage(exitCode): """ Return a nice exit message that corresponds to the given exit status code @@ -1617,32 +1768,35 @@ def getExitMessage(exitCode): if exitCode in EXIT_CODE_MAPPING.keys(): return EXIT_CODE_MAPPING[exitCode] elif exitCode >= 255: - return 'Exit status out of range, exit takes only integer arguments in the range 0-255' + return ( + "Exit status out of range, exit takes only integer arguments " + "in the range 0-255" + ) elif exitCode > 128: - return 'Fatal error signal: %s' % (exitCode-128) - return 'Unknown error code: %s' % exitCode + return "Fatal error signal: %s" % (exitCode - 128) + return "Unknown error code: %s" % exitCode def set_context(ob): - """ - Wrap an object in a REQUEST context. - """ + """Wrap an object in a REQUEST context.""" from ZPublisher.HTTPRequest import HTTPRequest from ZPublisher.HTTPResponse import HTTPResponse from ZPublisher.BaseRequest import RequestContainer + resp = HTTPResponse(stdout=None) env = { - 'SERVER_NAME':'localhost', - 'SERVER_PORT':'8080', - 'REQUEST_METHOD':'GET' - } + "SERVER_NAME": "localhost", + "SERVER_PORT": "8080", + "REQUEST_METHOD": "GET", + } req = HTTPRequest(None, env, resp) - return ob.__of__(RequestContainer(REQUEST = req)) + return ob.__of__(RequestContainer(REQUEST=req)) + def dumpCallbacks(deferred): - """ - Dump the callback chain of a Twisted Deferred object. The chain will be - displayed on standard output. + """Dump the callback chain of a Twisted Deferred object. + + The chain will be displayed on standard output. 
@param deferred: the twisted Deferred object to dump @type deferred: a Deferred object @@ -1658,9 +1812,6 @@ def dumpCallbacks(deferred): print "%-39.39s %-39.39s" % (callbackName, errbackName) -# add __iter__ method to LazyMap (used to implement catalog queries) to handle -# errors while iterating over the query results using __getitem__ -from Products.ZCatalog.Lazy import LazyMap def LazyMap__iter__(self): for i in range(len(self._seq)): try: @@ -1672,7 +1823,17 @@ def LazyMap__iter__(self): except IndexError: break -LazyMap.__iter__ = LazyMap__iter__ + +def _monkeypath_LazyMap__iter__(): + # Add __iter__ method to LazyMap (used to implement catalog queries) to + # handle errors while iterating over the query results using __getitem__. + from Products.ZCatalog.Lazy import LazyMap + + LazyMap.__iter__ = LazyMap__iter__ + + +_monkeypath_LazyMap__iter__() + def getObjectsFromCatalog(catalog, query=None, log=None): """ @@ -1708,13 +1869,12 @@ def getObjectsFromModelCatalog(catalog, query=None, log=None): def load_config(file, package=None, execute=True): - """ - Load a ZCML file into the context (and avoids duplicate imports). - """ + """Load a ZCML file into the context (and avoids duplicate imports).""" global _LOADED_CONFIGS key = (file, package) - if not key in _LOADED_CONFIGS: + if key not in _LOADED_CONFIGS: from Zope2.App import zcml + zcml.load_config(file, package, execute) _LOADED_CONFIGS.add(key) @@ -1726,42 +1886,38 @@ def load_config_override(file, package=None, execute=True): """ global _LOADED_CONFIGS key = (file, package) - if not key in _LOADED_CONFIGS: + if key not in _LOADED_CONFIGS: from zope.configuration import xmlconfig from Zope2.App.zcml import _context + xmlconfig.includeOverrides(_context, file, package=package) if execute: _context.execute_actions() _LOADED_CONFIGS.add(key) + def has_feature(name): """Return True if named feature is provided, otherwise return False.""" from Zope2.App.zcml import _context + return _context.hasFeature(name) + def rrd_daemon_running(): - """ - The RRD methods in this module are deprecated. - """ - pass + """The RRD methods in this module are deprecated.""" + def rrd_daemon_args(): - """ - The RRD methods in this module are deprecated. - """ - pass + """The RRD methods in this module are deprecated.""" + def rrd_daemon_reset(): - """ - The RRD methods in this module are deprecated. - """ - pass + """The RRD methods in this module are deprecated.""" + def rrd_daemon_retry(fn): - """ - The RRD methods in this module are deprecated. - """ - pass + """The RRD methods in this module are deprecated.""" + @contextlib.contextmanager def get_temp_dir(): @@ -1773,16 +1929,14 @@ def get_temp_dir(): finally: shutil.rmtree(dirname) + def getDefaultZopeUrl(): - """ - Returns the default Zope URL. - """ - return 'http://localhost:8080' + """Returns the default Zope URL.""" + return "http://localhost:8080" def swallowExceptions(log, msg=None, showTraceback=True, returnValue=None): - """ - USE THIS CAUTIOUSLY. Don't hide exceptions carelessly. + """USE THIS CAUTIOUSLY. Don't hide exceptions carelessly. Decorator to safely call a method, logging exceptions without raising them. @@ -1796,6 +1950,7 @@ def closeFilesBeforeExit(): @param showTraceback True to include the stacktrace (the default). @param returnValue The return value on error. 
""" + @decorator def callSafely(func, *args, **kwargs): try: @@ -1812,9 +1967,9 @@ def callSafely(func, *args, **kwargs): return callSafely + def getAllParserOptionsGen(parser): - """ - Returns a generator of all valid options for the optparse.OptionParser. + """Returns a generator of all valid options for the optparse.OptionParser. @param parser The parser to retrieve options for. @type parser optparse.OptionParser @@ -1833,9 +1988,9 @@ def ipv6_available(): except socket.error: return False + def atomicWrite(filename, data, raiseException=True, createDir=False): - """ - atomicWrite writes data in an atmomic manner to filename. + """Atomically writes data to filename. @param filename Complete path of file to write to. @type filename string @@ -1856,7 +2011,7 @@ def atomicWrite(filename, data, raiseException=True, createDir=False): # create a file in the same directory as the destination file with tempfile.NamedTemporaryFile(dir=dirName, delete=False) as tfile: tfile.write(data) - os.rename(tfile.name, filename) # atomic operation on POSIX systems + os.rename(tfile.name, filename) # atomic operation on POSIX systems except Exception as ex: if tfile is not None and os.path.exists(tfile.name): try: @@ -1869,62 +2024,80 @@ def atomicWrite(filename, data, raiseException=True, createDir=False): def isRunning(daemon): - """ - Determines whether a specific daemon is running by calling 'daemon status' - """ - return call([daemon, 'status'], stdout=PIPE, stderr=STDOUT) == 0 + """Return True if the specified daemon is running.""" + return call([daemon, "status"], stdout=PIPE, stderr=STDOUT) == 0 + def requiresDaemonShutdown(daemon, logger=log): - """ - Performs an operation while the requested daemon is not running. Will stop - and restart the daemon automatically. Throws a CalledProcessError if either - shutdown or restart fails. + """Performs an operation while the requested daemon is not running. + + Will stop and restart the daemon automatically. + + Throws a CalledProcessError if either shutdown or restart fails. @param daemon Which daemon to bring down for the operation. @param logger Which logger to use, or None to not log. """ + @decorator def callWithShutdown(func, *args, **kwargs): cmd = binPath(daemon) running = isRunning(cmd) if running: - if logger: logger.info('Shutting down %s for %s operation...', daemon, func.__name__) - check_call([cmd, 'stop']) + if logger: + logger.info( + "Shutting down %s for %s operation...", + daemon, + func.__name__, + ) + check_call([cmd, "stop"]) # make sure the daemon is actually shut down for i in range(30): nowrunning = isRunning(cmd) - if not nowrunning: break + if not nowrunning: + break time.sleep(1) else: - raise Exception('Failed to terminate daemon %s with command %s' % (daemon, cmd + ' stop')) + raise Exception( + "Failed to terminate daemon %s with command %s" + % (daemon, cmd + " stop") + ) try: return func(*args, **kwargs) except Exception as ex: - if logger: logger.error('Error performing %s operation: %s', func.__name__, ex) + if logger: + logger.error( + "Error performing %s operation: %s", func.__name__, ex + ) raise finally: if running: - if logger: logger.info('Starting %s after %s operation...', daemon, func.__name__) - check_call([cmd, 'start']) + if logger: + logger.info( + "Starting %s after %s operation...", + daemon, + func.__name__, + ) + check_call([cmd, "start"]) return callWithShutdown + def isZenBinFile(name): - """ - Check if given name is a valid file in $ZENHOME/bin. 
- """ + """Check if given name is a valid file in $ZENHOME/bin.""" if os.path.sep in name: return False return os.path.isfile(binPath(name)) def wait(seconds): - """ - Delays execution of subsequent code. Example: + """Delays execution of subsequent code. + + Example: @defer.inlineCallbacks def incrOne(a): @@ -1949,7 +2122,7 @@ def incrOne(a): giveTimeToReactor = partial(task.deferLater, reactor, 0) -def addXmlServerTimeout(server,timeout=socket._GLOBAL_DEFAULT_TIMEOUT): +def addXmlServerTimeout(server, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): """ Given an instance of xmlrpclib.ServerProxy (same as xmlrpclib.Server), attach a timeout for the underlying http/socket connection. @@ -1980,10 +2153,10 @@ def _timeout_make_connection(self, host): return self._connection[1] chost, self._extra_headers, x509 = self.get_host_info(host) - self._connection = host, httplib.HTTPConnection(chost,timeout=timeout) + self._connection = host, httplib.HTTPConnection(chost, timeout=timeout) return self._connection[1] - def _timeout_make_safe_connection(self,host): + def _timeout_make_safe_connection(self, host): if self._connection and host == self._connection[0]: return self._connection[1] try: @@ -1991,7 +2164,7 @@ def _timeout_make_safe_connection(self,host): except AttributeError: raise NotImplementedError( "your version of httplib doesn't support HTTPS" - ) + ) else: chost, self._extra_headers, x509 = self.get_host_info(host) kwargs = dict(timeout=timeout) @@ -2001,25 +2174,33 @@ def _timeout_make_safe_connection(self,host): return self._connection[1] transport = server._ServerProxy__transport - if isinstance( transport, xmlrpclib.SafeTransport ): - transport.make_connection = types.MethodType( _timeout_make_safe_connection, transport ) + if isinstance(transport, xmlrpclib.SafeTransport): + transport.make_connection = types.MethodType( + _timeout_make_safe_connection, transport + ) else: - transport.make_connection = types.MethodType( _timeout_make_connection, transport ) + transport.make_connection = types.MethodType( + _timeout_make_connection, transport + ) return server + def snmptranslate(*args): - command = ' '.join(['snmptranslate', '-Ln'] + list(args)) + command = " ".join(["snmptranslate", "-Ln"] + list(args)) proc = Popen(command, shell=True, stdout=PIPE, stderr=PIPE) output, errors = proc.communicate() proc.wait() if proc.returncode != 0: - log.error("snmptranslate returned errors for %s: %s", list(args), errors) - return 'Error translating: %s' % list(args) + log.error( + "snmptranslate returned errors for %s: %s", list(args), errors + ) + return "Error translating: %s" % list(args) return output.strip() -def getTranslation(msgId, REQUEST, domain='zenoss'): + +def getTranslation(msgId, REQUEST, domain="zenoss"): """ Take a string like: 'en-us,en;q=0.7,ja;q=0.3' @@ -2028,23 +2209,23 @@ def getTranslation(msgId, REQUEST, domain='zenoss'): Assumes that the input msgId is """ - langs = REQUEST.get('HTTP_ACCEPT_LANGUAGE').split(',') + langs = REQUEST.get("HTTP_ACCEPT_LANGUAGE").split(",") langOrder = [] for lang in langs: - data = lang.split(';q=') + data = lang.split(";q=") if len(data) == 1: - langOrder.append( (1.0, lang) ) + langOrder.append((1.0, lang)) else: - langOrder.append( (data[1], data[0]) ) + langOrder.append((data[1], data[0])) # Search for translations for weight, lang in sorted(langOrder): - msg = translate(msgId, domain=domain, - target_language=lang) + msg = translate(msgId, domain=domain, target_language=lang) # Relies on Zenoss currently using the text as the msgId if msg != 
msgId: return msg return msg + def unpublished(func): """Makes decorated method unpublished. @@ -2067,12 +2248,14 @@ def executeSshCommand(device, cmd, writefunc): loginTimeout=device.zCommandLoginTimeout, commandTimeout=device.zCommandCommandTimeout, keyPath=device.zKeyPath, - concurrentSessions=device.zSshConcurrentSessions + concurrentSessions=device.zSshConcurrentSessions, + ) + connection = SshClient( + device, + device.manageIp, + device.zCommandPort, + options=ssh_client_options, ) - connection = SshClient(device, - device.manageIp, - device.zCommandPort, - options=ssh_client_options) connection.clientFinished = reactor.stop connection.workList.append(cmd) connection._commands.append(cmd) @@ -2086,8 +2269,8 @@ def executeSshCommand(device, cmd, writefunc): def escapeSpecChars(value): - escape_re = re.compile(r'(?[$&|+\-!(){}[\]^~*?:])') - return escape_re.sub(r'\\\g', value) + escape_re = re.compile(r"(?[$&|+\-!(){}[\]^~*?:])") + return escape_re.sub(r"\\\g", value) def getPasswordFields(interface): @@ -2105,5 +2288,5 @@ def getPasswordFields(interface): def maskSecureProperties(data, secure_properties=[]): for prop in secure_properties: if data.get(prop, None): - data.update({prop: '*' * len(data[prop])}) + data.update({prop: "*" * len(data[prop])}) return data diff --git a/Products/ZenUtils/Version.py b/Products/ZenUtils/Version.py index 3a11f51cd4..1780e3b945 100644 --- a/Products/ZenUtils/Version.py +++ b/Products/ZenUtils/Version.py @@ -7,13 +7,18 @@ # ############################################################################## - """ Zenoss versioning module. """ + +from __future__ import print_function + import re +import six + + def getVersionTupleFromString(versionString): """ A utility function for parsing dot-delimited stings as a version tuple. @@ -197,25 +202,37 @@ def incrMicro(self): def setComment(self, comment): self.comment = comment - def __cmp__(self, other): - """ - Comparse one verion to another. If the other version supplied is not a - Version instance, attempt coercion. - - The assumption here is that any non-Version object being compared to a - Version object represents a verion of the same product with the same - name but a different version number. 
- """ + def __eq__(self, other): + if self is other: + return True + other = self._common_compare(other) + if not isinstance(other, Version): + return NotImplemented + return self.tuple() == other.tuple() + + def __lt__(self, other): + if self is other: + return False + other = self._common_compare(other) + if not isinstance(other, Version): + return NotImplemented + return self.tuple() < other.tuple() + + def __le__(self, other): + if self is other: + return True + other = self._common_compare(other) + if not isinstance(other, Version): + return NotImplemented + return self.tuple() <= other.tuple() + + def _common_compare(self, other): + other = Version.make(self.name, other) if other is None: - return 1 - if isinstance(other, tuple): - version = '.'.join(str(x) for x in other) - other = Version.parse("%s %s" % (self.name, version)) - elif any(isinstance(other, x) for x in (str, int, float, long)): - other = Version.parse("%s %s" % (self.name, str(other))) + return NotImplemented if self.name != other.name: raise IncomparableVersions() - return cmp(self.tuple(), other.tuple()) + return other def _formatSVNRevision(self): svnrev = self.revision @@ -242,6 +259,20 @@ def __str__(self): self.micro, self._formatSVNRevision()) + @classmethod + def make(cls, name, obj): + if isinstance(obj, cls): + return obj + if isinstance(obj, (tuple, list)): + version = '.'.join(str(x) for x in obj) + return cls.parse("%s %s" % (name, version)) + if any( + isinstance(obj, x) + for x in six.string_types + six.integer_types + (float,) + ): + return cls.parse("%s %s" % (name, obj)) + + @classmethod def parse(cls, versionString): """ Parse the version info from a string. This method is usable without @@ -263,26 +294,34 @@ def parse(cls, versionString): >>> v = Version.parse('Zenoss') >>> repr(v) 'Version(Zenoss, 0, 0, 0,)' - >>> print v + >>> print(v) [Zenoss, version 0.0.0] >>> v = Version.parse('Zenoss 1') >>> repr(v) 'Version(Zenoss, 1, 0, 0,)' - >>> print v + >>> print(v) [Zenoss, version 1.0.0] >>> v = Version.parse('Zenoss 0.26.4') >>> repr(v) 'Version(Zenoss, 0, 26, 4,)' - >>> print v + >>> print(v) [Zenoss, version 0.26.4] + >>> Version.parse('Zenoss 1.1.0') <= Version('Zenoss', 1, 0, 0) + False + >>> Version.parse('Zenoss 1.1.0') >= Version('Zenoss', 1, 0, 0) + True + >>> Version.parse('Zenoss 1.1.0') <= Version('Zenoss', 1, 1, 0) + True + >>> Version.parse('Zenoss 1.1.0') >= Version('Zenoss', 1, 1, 0) + True >>> v = Version.parse('Zenoss 0.32.1 r13667') >>> repr(v) 'Version(Zenoss, 0, 32, 1, r13667)' - >>> print v + >>> print(v) [Zenoss, version 0.32.1 r13667] """ versionParts = versionString.strip().split() @@ -302,7 +341,6 @@ def parse(cls, versionString): revision = '' self = Version(name, major, minor, micro, revision) return self - parse = classmethod(parse) def _test(): diff --git a/Products/ZenUtils/ZCmdBase.py b/Products/ZenUtils/ZCmdBase.py index a1f309be24..67bdbd96c0 100644 --- a/Products/ZenUtils/ZCmdBase.py +++ b/Products/ZenUtils/ZCmdBase.py @@ -1,50 +1,49 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - -__doc__="""ZenDaemon - -$Id: ZC.py,v 1.9 2004/02/16 17:19:31 edahl Exp $""" - -__version__ = "$Revision: 1.9 $"[11:-2] +from __future__ import absolute_import, print_function import time -from collections import Iterator +from collections import Iterator from threading import Lock + +from AccessControl.SecurityManagement import ( + newSecurityManager, + noSecurityManager, +) from twisted.internet import defer from zope.component import getUtility -from AccessControl.SecurityManagement import newSecurityManager -from AccessControl.SecurityManagement import noSecurityManager +from Products.ZenRelations.ZenPropertyManager import setDescriptors -from Products.ZenUtils.Utils import getObjByPath, zenPath -from Products.ZenUtils.ZodbFactory import IZodbFactoryLookup +from .Exceptions import ZentinelException +from .mysql import MySQLdb +from .Utils import getObjByPath, wait, zenPath +from .ZenDaemon import ZenDaemon +from .ZodbFactory import IZodbFactoryLookup -from Exceptions import ZentinelException -from ZenDaemon import ZenDaemon +defaultCacheDir = zenPath("var") -from Products.ZenRelations.ZenPropertyManager import setDescriptors -from Products.ZenUtils.mysql import MySQLdb -from Products.ZenUtils.Utils import wait -defaultCacheDir = zenPath('var') +class DataRootError(Exception): + pass -class DataRootError(Exception):pass -def login(context, name='admin', userfolder=None): +def login(context, name="admin", userfolder=None): """Logs in.""" if userfolder is None: userfolder = context.getPhysicalRoot().acl_users user = userfolder.getUserById(name) - if user is None: return - if not hasattr(user, 'aq_base'): + if user is None: + return + if not hasattr(user, "aq_base"): user = user.__of__(userfolder) newSecurityManager(None, user) return user @@ -86,9 +85,10 @@ def next(self): class ZCmdBase(ZenDaemon): + """Base class for daemons that need ZODB access.""" def __init__(self, noopts=0, app=None, keeproot=False): - ZenDaemon.__init__(self, noopts, keeproot) + super(ZCmdBase, self).__init__(noopts=noopts, keeproot=keeproot) self.dataroot = None self.app = app self.db = None @@ -102,52 +102,47 @@ def __init__(self, noopts=0, app=None, keeproot=False): def zodbConnect(self): connectionFactory = getUtility(IZodbFactoryLookup).get() - self.db, self.storage = connectionFactory.getConnection(**self.options.__dict__) + self.db, self.storage = connectionFactory.getConnection( + **self.options.__dict__ + ) - def login(self, name='admin', userfolder=None): + def login(self, name="admin", userfolder=None): """Logs in.""" login(self.dmd, name, userfolder) - def logout(self): """Logs out.""" noSecurityManager() - def getConnection(self): - """Return a wrapped app connection from the connection pool. - """ + """Return a wrapped app connection from the connection pool.""" if not self.db: raise ZentinelException( - "running inside zope can't open connections.") + "running inside zope can't open connections." + ) with self.poollock: - connection=self.db.open() - root=connection.root() - app=root['Application'] + connection = self.db.open() + root = connection.root() + app = root["Application"] app = self.getContext(app) app._p_jar.sync() return app - def closeAll(self): - """Close all connections in both free an inuse pools. 
- """ + """Close all connections in both free an inuse pools.""" self.db.close() - def opendb(self): - if self.app: return - self.connection=self.db.open() - root=self.connection.root() - app=root['Application'] + if self.app: + return + self.connection = self.db.open() + root = self.connection.root() + app = root["Application"] self.app = self.getContext(app) - @defer.inlineCallbacks def async_syncdb(self): - """ - Asynchronous version of the syncdb method. - """ + """Asynchronous version of the syncdb method.""" last_exc = None for delay in _RetryIterator(): try: @@ -156,7 +151,8 @@ def async_syncdb(self): last_exc = str(exc) self.log.warn( "Connection to ZODB interrupted, will try " - "to reconnect again in %.3f seconds.", delay + "to reconnect again in %.3f seconds.", + delay, ) # yield back to reactor for 'delay' seconds yield wait(delay) @@ -170,18 +166,20 @@ def async_syncdb(self): "Timed out trying to reconnect to ZODB: %s", last_exc ) - def syncdb(self): MAX_RETRY_TIME_MINUTES = 10 MAX_RETRY_DELAY_SECONDS = 30 retryStartedAt = None + def timedOut(): if retryStartedAt is None: return False else: - return retryStartedAt + MAX_RETRY_TIME_MINUTES * 60 < time.time() - + return ( + retryStartedAt + MAX_RETRY_TIME_MINUTES * 60 < time.time() + ) + retryMultiplier = 1.618 retryDelay = 1 @@ -196,17 +194,21 @@ def timedOut(): self.log.exception(e) keepTrying = False break - + if retryDelay * retryMultiplier >= MAX_RETRY_DELAY_SECONDS: retryDelay = MAX_RETRY_DELAY_SECONDS else: retryDelay *= retryMultiplier - self.log.warn("Connection to ZODB interrupted, will try to reconnect again in %d seconds.", retryDelay) - + self.log.warn( + "Connection to ZODB interrupted, will try to " + "reconnect again in %d seconds.", + retryDelay, + ) + if retryStartedAt is None: retryStartedAt = time.time() - + try: time.sleep(retryDelay) except Exception as e: @@ -214,49 +216,46 @@ def timedOut(): else: keepTrying = False - def closedb(self): self.connection.close() - #self.db.close() + # self.db.close() self.app = None self.dataroot = None self.dmd = None - def getDataRoot(self): - if not self.app: self.opendb() + if not self.app: + self.opendb() if not self.dataroot: - self.dataroot = getObjByPath(self.app, self.options.dataroot) + self.dataroot = getObjByPath(self.app, self.options.zodb_dataroot) self.dmd = self.dataroot - def getContext(self, app): from ZPublisher.HTTPRequest import HTTPRequest from ZPublisher.HTTPResponse import HTTPResponse from ZPublisher.BaseRequest import RequestContainer + resp = HTTPResponse(stdout=None) env = { - 'SERVER_NAME':'localhost', - 'SERVER_PORT':'8080', - 'REQUEST_METHOD':'GET' - } + "SERVER_NAME": "localhost", + "SERVER_PORT": "8080", + "REQUEST_METHOD": "GET", + } req = HTTPRequest(None, env, resp) - return app.__of__(RequestContainer(REQUEST = req)) - + return app.__of__(RequestContainer(REQUEST=req)) def getDmdObj(self, path): - """return an object based on a path starting from the dmd""" - return getObjByPath(self.app, self.options.dataroot+path) - + """Return an object based on a path starting from the dmd.""" + return getObjByPath(self.app, self.options.zodb_dataroot + path) def findDevice(self, name): - """return a device based on its FQDN""" + """Return a device based on its FQDN.""" devices = self.dataroot.getDmdRoot("Devices") return devices.findDevice(name) def buildOptions(self): - """basic options setup sub classes can add more options here""" + """Basic options setup sub classes can add more options here.""" ZenDaemon.buildOptions(self) connectionFactory = 
getUtility(IZodbFactoryLookup).get() diff --git a/Products/ZenUtils/ZenBackup.py b/Products/ZenUtils/ZenBackup.py index 428981d5f3..b0508f26eb 100755 --- a/Products/ZenUtils/ZenBackup.py +++ b/Products/ZenUtils/ZenBackup.py @@ -153,7 +153,7 @@ def getName(index=0): (index and '_%s' % index) or '') backupDir = zenPath('backups') if not os.path.exists(backupDir): - os.mkdir(backupDir, 0750) + os.mkdir(backupDir, 0o750) for i in range(MAX_UNIQUE_NAME_ATTEMPTS): name = os.path.join(backupDir, getName(i)) if not os.path.exists(name): @@ -455,7 +455,7 @@ def makeBackup(self): self.rootTempDir = self.getTempDir() self.tempDir = os.path.join(self.rootTempDir, BACKUP_DIR) self.log.debug("Use %s as a staging directory for the backup", self.tempDir) - os.mkdir(self.tempDir, 0750) + os.mkdir(self.tempDir, 0o750) if self.options.collector: self.options.noEventsDb = True diff --git a/Products/ZenUtils/ZenDaemon.py b/Products/ZenUtils/ZenDaemon.py index 0a79d9d4df..49b5ad35ea 100755 --- a/Products/ZenUtils/ZenDaemon.py +++ b/Products/ZenUtils/ZenDaemon.py @@ -7,33 +7,34 @@ # ############################################################################## +from __future__ import absolute_import, print_function -"""ZenDaemon - -Base class for making daemon programs -""" - -import re -import sys +import logging import os import pwd +import re +import signal import socket -import logging +import sys -from twisted.internet import defer -from twisted.python import log as twisted_log +from platform import system +from urllib import getproxies + +from twisted.internet import defer, reactor from twisted.logger import globalLogBeginner +from twisted.python import log as twisted_log from Products.ZenMessaging.audit import audit -from Products.ZenUtils.CmdBase import CmdBase -from Products.ZenUtils.Utils import zenPath, HtmlFormatter, binPath, setLogLevel -from Products.ZenUtils.Watchdog import Reporter from Products.Zuul.utils import safe_hasattr as hasattr -from Products.ZenUtils.dumpthreads import dump_threads + +from .CmdBase import CmdBase +from .dumpthreads import dump_threads +from .Utils import binPath, HtmlFormatter, setLogLevel, zenPath +from .Watchdog import Reporter # Daemon creation code below based on Recipe by Chad J. Schroeder # File mode creation mask of the daemon. -UMASK = 0022 +UMASK = 0o022 # Default working directory for the daemon. 
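The mode and umask constants in the hunks above move from the legacy leading-zero octal spelling to the PEP 3127 form (`0750` → `0o750` in ZenBackup.py, `0022` → `0o022` here). Both spellings are equivalent on Python 2.6/2.7, but only the `0o` form is also valid Python 3 syntax. A minimal sketch, outside the patch, using a hypothetical directory path:

# Illustrative only -- not part of this patch; the path below is hypothetical.
import os
import stat

MODE = 0o750  # rwxr-x---: owner full access, group read/execute, others none
assert MODE == (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP)

backup_dir = "/tmp/zen-example-backups"
if not os.path.exists(backup_dir):
    os.mkdir(backup_dir, MODE)  # same call pattern as the updated mkdir sites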
WORKDIR = "/" @@ -46,9 +47,7 @@ class ZenDaemon(CmdBase): - """ - Base class for creating daemons - """ + """Base class for creating daemons.""" pidfile = None @@ -62,28 +61,32 @@ def __init__(self, noopts=0, keeproot=False): self.keeproot = keeproot self.reporter = None self.fqdn = socket.getfqdn() - from twisted.internet import reactor - reactor.addSystemEventTrigger('before', 'shutdown', self.sigTerm) + + reactor.addSystemEventTrigger("before", "shutdown", self.sigTerm) if not noopts: if self.options.daemon: self.changeUser() self.becomeDaemon() - if self.options.pidfile or self.options.daemon or self.options.watchdogPath: + if ( + self.options.pidfile + or self.options.daemon + or self.options.watchdogPath + ): try: self.writePidFile() except OSError: raise SystemExit( - "ERROR: unable to open PID file %s" - % (self.pidfile or "(unknown)",) - ) + "ERROR: unable to open PID file %s" + % (self.pidfile or "(unknown)",) + ) if self.options.watchdog and not self.options.watchdogPath: self.becomeWatchdog() - self.audit('Start') + self.audit("Start") def audit(self, action): - processName = re.sub(r'^.*/', '', sys.argv[0]) - daemon = re.sub('.py$', '', processName) - audit('Shell.Daemon.' + action, daemon=daemon) + processName = re.sub(r"^.*/", "", sys.argv[0]) + daemon = re.sub(".py$", "", processName) + audit("Shell.Daemon." + action, daemon=daemon) def convertSocketOption(self, optString): """ @@ -91,60 +94,62 @@ def convertSocketOption(self, optString): to a C-friendly command-line option for passing to zensocket. """ optString = optString.upper() - if '=' not in optString: # Assume boolean + if "=" not in optString: # Assume boolean flag = optString value = 1 else: - flag, value = optString.split('=', 1) + flag, value = optString.split("=", 1) try: value = int(value) except ValueError: self.log.warn( "The value %s for flag %s cound not be converted", - value, flag) + value, + flag, + ) return None # Check to see if we can find the option if flag not in dir(socket): - self.log.warn("The flag %s is not a valid socket option", - flag) + self.log.warn("The flag %s is not a valid socket option", flag) return None numericFlag = getattr(socket, flag) - return '--socketOpt=%s:%s' % (numericFlag, value) + return "--socketOpt=%s:%s" % (numericFlag, value) def openPrivilegedPort(self, *address): - """ - Execute under zensocket, providing the args to zensocket - """ + """Execute under zensocket, providing the args to zensocket.""" socketOptions = [] for optString in set(self.options.socketOption): arg = self.convertSocketOption(optString) if arg: socketOptions.append(arg) - zensocket = binPath('zensocket') - cmd = [zensocket, zensocket] + list(address) + socketOptions \ - + ['--', sys.executable] + sys.argv \ - + ['--useFileDescriptor=$privilegedSocket'] + zensocket = binPath("zensocket") + cmd = ( + [zensocket, zensocket] + + list(address) + + socketOptions + + ["--", sys.executable] + + sys.argv + + ["--useFileDescriptor=$privilegedSocket"] + ) self.log.debug(cmd) os.execlp(*cmd) def writePidFile(self): - """ - Write the PID file to disk - """ - pidfile = getattr(self.options, 'pidfile', '') + """Write the PID file to disk.""" + pidfile = getattr(self.options, "pidfile", "") if pidfile: myname = pidfile else: myname = sys.argv[0].split(os.sep)[-1] - if myname.endswith('.py'): + if myname.endswith(".py"): myname = myname[:-3] - monitor = getattr(self.options, 'monitor', 'localhost') + monitor = getattr(self.options, "monitor", "localhost") myname = "%s-%s.pid" % (myname, monitor) if 
self.options.watchdog and not self.options.watchdogPath: - self.pidfile = zenPath("var", 'watchdog-%s' % myname) + self.pidfile = zenPath("var", "watchdog-%s" % myname) else: self.pidfile = zenPath("var", myname) - fp = open(self.pidfile, 'w') + fp = open(self.pidfile, "w") mypid = str(os.getpid()) fp.write(mypid) fp.close() @@ -152,45 +157,52 @@ def writePidFile(self): @property def logname(self): - return getattr(self, 'mname', self.__class__.__name__) + return getattr(self, "mname", self.__class__.__name__) def setupLogging(self): - """ - Create formating for log entries and set default log level - """ - # Initialize twisted logging to go nowhere. (it may be re-enabled by SIGUSR1) - globalLogBeginner.beginLoggingTo([lambda x: None], redirectStandardIO=False, discardBuffer=True) + """Create formating for log entries and set default log level.""" + # Initialize twisted logging to go nowhere. + globalLogBeginner.beginLoggingTo( + [lambda x: None], redirectStandardIO=False, discardBuffer=True + ) # Setup python logging module rootLog = logging.getLogger() rootLog.setLevel(logging.WARN) - zenLog = logging.getLogger('zen') + zenLog = logging.getLogger("zen") zenLog.setLevel(self.options.logseverity) formatter = logging.Formatter( - '%(asctime)s %(levelname)s %(name)s: %(message)s') + "%(asctime)s %(levelname)s %(name)s: %(message)s" + ) if self.options.logfileonly: - #clear out existing handlers + # clear out existing handlers hdlrs = rootLog.handlers for hdlr in hdlrs: rootLog.removeHandler(hdlr) - if self.options.watchdogPath or self.options.daemon \ - or self.options.duallog or self.options.logfileonly: + if ( + self.options.watchdogPath + or self.options.daemon + or self.options.duallog + or self.options.logfileonly + ): logdir = self.checkLogpath() or zenPath("log") handler = logging.handlers.RotatingFileHandler( - filename=os.path.join( - logdir, '%s.log' % self.logname.lower()), - maxBytes=self.options.maxLogKiloBytes * 1024, - backupCount=self.options.maxBackupLogs + filename=os.path.join(logdir, "%s.log" % self.logname.lower()), + maxBytes=self.options.maxLogKiloBytes * 1024, + backupCount=self.options.maxBackupLogs, ) handler.setFormatter(formatter) rootLog.addHandler(handler) - if not (self.options.watchdogPath or self.options.daemon \ - or self.options.logfileonly): + if not ( + self.options.watchdogPath + or self.options.daemon + or self.options.logfileonly + ): # We are logging to the console # Find the stream handler and make it match our desired log level if self.options.weblog: @@ -201,15 +213,17 @@ def setupLogging(self): consoleHandler = logging.StreamHandler(sys.stderr) rootLog.addHandler(consoleHandler) - for handler in (h for h in rootLog.handlers - if isinstance(h, logging.StreamHandler)): + for handler in ( + h + for h in rootLog.handlers + if isinstance(h, logging.StreamHandler) + ): handler.setFormatter(formatter) - self.log = logging.getLogger('zen.%s' % self.logname) + self.log = logging.getLogger("zen.%s" % self.logname) # Allow the user to dynamically lower and raise the logging # level without restarts. - import signal try: signal.signal(signal.SIGUSR1, self.sighandler_USR1) except ValueError: @@ -223,11 +237,12 @@ def sighandler_USR1(self, signum, frame): Switch to debug level if signaled by the user, and to default when signaled again. 
""" + def getTwistedLogger(): loggerName = "zen.%s.twisted" % self.logname return twisted_log.PythonLoggingObserver(loggerName=loggerName) - log = logging.getLogger('zen') + log = logging.getLogger("zen") currentLevel = log.getEffectiveLevel() if currentLevel == logging.DEBUG: if self.options.logseverity == logging.DEBUG: @@ -236,13 +251,16 @@ def getTwistedLogger(): log.info( "Restoring logging level back to %s (%d)", logging.getLevelName(self.options.logseverity) or "unknown", - self.options.logseverity) + self.options.logseverity, + ) try: defer.setDebugging(False) getTwistedLogger().stop() except ValueError: # Twisted logging is somewhat broken - log.info("Unable to remove Twisted logger -- " - "expect Twisted logging to continue.") + log.info( + "Unable to remove Twisted logger -- " + "expect Twisted logging to continue." + ) else: setLogLevel(logging.DEBUG, "zen") log.info("Setting logging level to DEBUG") @@ -250,32 +268,30 @@ def getTwistedLogger(): getTwistedLogger().start() dump_threads(signum, frame) self._sigUSR1_called(signum, frame) - self.audit('Debug') + self.audit("Debug") def _sigUSR1_called(self, signum, frame): pass def changeUser(self): - """ - Switch identity to the appropriate Unix user - """ + """Switch identity to the appropriate Unix user.""" if not self.keeproot: try: cname = pwd.getpwuid(os.getuid())[0] pwrec = pwd.getpwnam(self.options.uid) os.setuid(pwrec.pw_uid) - os.environ['HOME'] = pwrec.pw_dir + os.environ["HOME"] = pwrec.pw_dir except (KeyError, OSError): - print >>sys.stderr, "WARN: user:%s not found running as:%s" \ - % (self.options.uid, cname) + print( + "WARN: user:%s not found running as:%s" + % (self.options.uid, cname), + file=sys.stderr, + ) def becomeDaemon(self): - """Code below comes from the excellent recipe by Chad J. Schroeder. - """ + """Code below comes from the excellent recipe by Chad J. Schroeder.""" # Workaround for http://bugs.python.org/issue9405 on Mac OS X - from platform import system - if system() == 'Darwin': - from urllib import getproxies + if system() == "Darwin": getproxies() try: pid = os.fork() @@ -307,14 +323,13 @@ def becomeDaemon(self): os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) # Duplicate standard input to standard output and standard error. - os.dup2(0, 1) # standard output (1) - os.dup2(0, 2) # standard error (2) + os.dup2(0, 1) # standard output (1) + os.dup2(0, 2) # standard error (2) def sigTerm(self, signum=None, frame=None): - """ - Signal handler for the SIGTERM signal. - """ + """Signal handler for the SIGTERM signal.""" from Products.ZenUtils.Utils import unused + unused(signum, frame) stop = getattr(self, "stop", None) if callable(stop): @@ -322,76 +337,78 @@ def sigTerm(self, signum=None, frame=None): if self.pidfile and os.path.exists(self.pidfile): self.log.info("Deleting PID file %s ...", self.pidfile) os.remove(self.pidfile) - self.log.info('Daemon %s shutting down', type(self).__name__) - self.audit('Stop') + self.log.info("received signal to shut down") + self.audit("Stop") def watchdogCycleTime(self): """ - Return our cycle time (in minutes) + Return our cycle time (in minutes). 
@return: cycle time @rtype: integer """ # time between child reports: default to 2x the default cycle time default = 1200 - cycleTime = getattr(self.options, 'cycleTime', default) + cycleTime = getattr(self.options, "cycleTime", default) if not cycleTime: cycleTime = default return cycleTime def watchdogStartTimeout(self): """ - Return our watchdog start timeout (in minutes) + Return our watchdog start timeout (in minutes). @return: start timeout @rtype: integer """ # Default start timeout should be cycle time plus a couple of minutes default = self.watchdogCycleTime() + 120 - startTimeout = getattr(self.options, 'starttimeout', default) + startTimeout = getattr(self.options, "starttimeout", default) if not startTimeout: startTimeout = default return startTimeout def watchdogMaxRestartTime(self): """ - Return our watchdog max restart time (in minutes) + Return our watchdog max restart time (in minutes). @return: maximum restart time @rtype: integer """ default = 600 - maxTime = getattr(self.options, 'maxRestartTime', default) + maxTime = getattr(self.options, "maxRestartTime", default) if not maxTime: maxTime = default return default def becomeWatchdog(self): - """ - Watch the specified daemon and restart it if necessary. - """ + """Watch the specified daemon and restart it if necessary.""" from Products.ZenUtils.Watchdog import Watcher, log + log.setLevel(self.options.logseverity) cmd = sys.argv[:] - if '--watchdog' in cmd: - cmd.remove('--watchdog') - if '--daemon' in cmd: - cmd.remove('--daemon') + if "--watchdog" in cmd: + cmd.remove("--watchdog") + if "--daemon" in cmd: + cmd.remove("--daemon") - socketPath = '%s/.%s-watchdog-%d' % ( - zenPath('var'), self.__class__.__name__, os.getpid()) + socketPath = "%s/.%s-watchdog-%d" % ( + zenPath("var"), + self.__class__.__name__, + os.getpid(), + ) cycleTime = self.watchdogCycleTime() startTimeout = self.watchdogStartTimeout() maxTime = self.watchdogMaxRestartTime() - self.log.debug("Watchdog cycleTime=%d startTimeout=%d maxTime=%d", - cycleTime, startTimeout, maxTime) - - watchdog = Watcher(socketPath, - cmd, - startTimeout, - cycleTime, - maxTime) + self.log.debug( + "Watchdog cycleTime=%d startTimeout=%d maxTime=%d", + cycleTime, + startTimeout, + maxTime, + ) + + watchdog = Watcher(socketPath, cmd, startTimeout, cycleTime, maxTime) watchdog.run() sys.exit(0) @@ -405,45 +422,88 @@ def niceDoggie(self, timeout): self.reporter.niceDoggie(timeout) def buildOptions(self): - """ - Standard set of command-line options. 
- """ - CmdBase.buildOptions(self) - self.parser.add_option('--uid', dest='uid', default="zenoss", - help='User to become when running default:zenoss') - self.parser.add_option('-c', '--cycle', dest='cycle', - action="store_true", default=False, - help="Cycle continuously on cycleInterval from Zope") - self.parser.add_option('-D', '--daemon', default=False, - dest='daemon', action="store_true", - help="Launch into the background") - self.parser.add_option('--duallog', default=False, - dest='duallog', action="store_true", - help="Log to console and log file") - self.parser.add_option('--logfileonly', default=False, - dest='logfileonly', action="store_true", - help="Log to log file and not console") - self.parser.add_option('--weblog', default=False, - dest='weblog', action="store_true", - help="output log info in HTML table format") - self.parser.add_option('--watchdog', default=False, - dest='watchdog', action="store_true", - help="Run under a supervisor which will restart it") - self.parser.add_option('--watchdogPath', default=None, - dest='watchdogPath', - help="The path to the watchdog reporting socket") - self.parser.add_option('--starttimeout', - dest='starttimeout', type="int", - help="Wait seconds for initial heartbeat") - self.parser.add_option('--socketOption', - dest='socketOption', default=[], action='append', - help="Set listener socket options. " - "For option details: man 7 socket") - self.parser.add_option('--heartbeattimeout', - dest='heartbeatTimeout', - type='int', - help="Set a heartbeat timeout in seconds for a daemon", - default=900) - self.parser.add_option('--pidfile', dest='pidfile', default="", - help='pidfile to save a pid number of a process') - + super(ZenDaemon, self).buildOptions() + self.parser.add_option( + "--uid", + dest="uid", + default="zenoss", + help="User to become when running; default %default", + ) + self.parser.add_option( + "-c", + "--cycle", + dest="cycle", + action="store_true", + default=False, + help="Cycle continuously on cycleInterval from Zope", + ) + self.parser.add_option( + "-D", + "--daemon", + default=False, + dest="daemon", + action="store_true", + help="Launch into the background", + ) + self.parser.add_option( + "--duallog", + default=False, + dest="duallog", + action="store_true", + help="Log to console and log file", + ) + self.parser.add_option( + "--logfileonly", + default=False, + dest="logfileonly", + action="store_true", + help="Log to log file and not console", + ) + self.parser.add_option( + "--weblog", + default=False, + dest="weblog", + action="store_true", + help="output log info in HTML table format", + ) + self.parser.add_option( + "--watchdog", + default=False, + dest="watchdog", + action="store_true", + help="Run under a supervisor which will restart it", + ) + self.parser.add_option( + "--watchdogPath", + default=None, + dest="watchdogPath", + help="The path to the watchdog reporting socket", + ) + self.parser.add_option( + "--starttimeout", + dest="starttimeout", + type="int", + help="Wait seconds for initial heartbeat", + ) + self.parser.add_option( + "--socketOption", + dest="socketOption", + default=[], + action="append", + help="Set listener socket options. 
" + "For option details: man 7 socket", + ) + self.parser.add_option( + "--heartbeattimeout", + dest="heartbeatTimeout", + type="int", + default=getattr(self, "heartbeatTimeout", 900), + help="Set a heartbeat timeout in seconds for a daemon; " + "default %default", + ) + self.parser.add_option( + "--pidfile", + dest="pidfile", + default="", + help="pidfile to save a pid number of a process", + ) diff --git a/Products/ZenUtils/ZenPackCmd.py b/Products/ZenUtils/ZenPackCmd.py index de1d4d759c..b4c4553803 100644 --- a/Products/ZenUtils/ZenPackCmd.py +++ b/Products/ZenUtils/ZenPackCmd.py @@ -77,7 +77,7 @@ def CreateZenPack(zpId, prevZenPackName='', devDir=None): if not devDir: devDir = zenPath('ZenPacks') if not os.path.exists(devDir): - os.mkdir(devDir, 0750) + os.mkdir(devDir, 0o750) destDir = os.path.join(devDir, zpId) shutil.copytree(srcDir, destDir, symlinks=False) os.system('find %s -name .svn | xargs rm -rf' % destDir) @@ -695,7 +695,7 @@ def CreateZenPacksDir(): """ zpDir = zenPath('ZenPacks') if not os.path.isdir(zpDir): - os.mkdir(zpDir, 0750) + os.mkdir(zpDir, 0o750) def DoEasyInstall(eggPath): diff --git a/Products/ZenUtils/ZenScriptBase.py b/Products/ZenUtils/ZenScriptBase.py index 5493c7e0e4..16457ea60c 100644 --- a/Products/ZenUtils/ZenScriptBase.py +++ b/Products/ZenUtils/ZenScriptBase.py @@ -1,13 +1,12 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - """ZenScriptBase Scripts with classes who extend ZenScriptBase have a zope instance with a @@ -16,22 +15,27 @@ from zope.component import getUtility -from AccessControl.SecurityManagement import newSecurityManager -from AccessControl.SecurityManagement import noSecurityManager +from AccessControl.SecurityManagement import ( + newSecurityManager, + noSecurityManager, +) from transaction import commit -from Products.ZenUtils.Utils import getObjByPath, zenPath, set_context -from Products.ZenUtils.CmdBase import CmdBase -from Products.ZenUtils.ZodbFactory import IZodbFactoryLookup from Products.ZenRelations.ZenPropertyManager import setDescriptors -from Products.ZenUtils.Exceptions import ZentinelException -defaultCacheDir = zenPath('var') +from .CmdBase import CmdBase +from .Exceptions import ZentinelException +from .Utils import getObjByPath, set_context, zenPath +from .ZodbFactory import IZodbFactoryLookup -class DataRootError(Exception):pass +defaultCacheDir = zenPath("var") -class ZenScriptBase(CmdBase): +class DataRootError(Exception): + pass + + +class ZenScriptBase(CmdBase): def __init__(self, noopts=0, app=None, connect=False, should_log=True): CmdBase.__init__(self, noopts, should_log=should_log) self.dataroot = None @@ -43,93 +47,86 @@ def __init__(self, noopts=0, app=None, connect=False, should_log=True): def connect(self): if not self.app: connectionFactory = getUtility(IZodbFactoryLookup).get() - self.db, self.storage = connectionFactory.getConnection(**self.options.__dict__) + self.db, self.storage = connectionFactory.getConnection( + **self.options.__dict__ + ) self.getDataRoot() self.login() - if getattr(self.dmd, 'propertyTransformers', None) is None: + if getattr(self.dmd, "propertyTransformers", None) is None: self.dmd.propertyTransformers = {} commit() setDescriptors(self.dmd) - - def 
login(self, name='admin', userfolder=None): + def login(self, name="admin", userfolder=None): """Logs in.""" if userfolder is None: userfolder = self.app.acl_users user = userfolder.getUserById(name) - if user is None: return - if not hasattr(user, 'aq_base'): + if user is None: + return + if not hasattr(user, "aq_base"): user = user.__of__(userfolder) newSecurityManager(None, user) - def logout(self): """Logs out.""" noSecurityManager() - def getConnection(self): - """Return a wrapped app connection from the connection pool. - """ + """Return a wrapped app connection from the connection pool.""" if not self.db: raise ZentinelException( - "running inside zope can't open connections.") + "running inside zope can't open connections." + ) with self.poollock: - connection=self.db.open() - root=connection.root() - app=root['Application'] + connection = self.db.open() + root = connection.root() + app = root["Application"] app = set_context(app) app._p_jar.sync() return app - def closeAll(self): - """Close all connections in both free an inuse pools. - """ + """Close all connections in both free an inuse pools.""" self.db.close() - def opendb(self): - if self.app: return - self.connection=self.db.open() - root=self.connection.root() - app = root['Application'] + if self.app: + return + self.connection = self.db.open() + root = self.connection.root() + app = root["Application"] self.app = set_context(app) self.app._p_jar.sync() - def syncdb(self): self.connection.sync() - def closedb(self): self.connection.close() - #self.db.close() + # self.db.close() self.app = None self.dataroot = None self.dmd = None - def getDataRoot(self): - if not self.app: self.opendb() + if not self.app: + self.opendb() if not self.dataroot: - self.dataroot = getObjByPath(self.app, self.options.dataroot) + self.dataroot = getObjByPath(self.app, self.options.zodb_dataroot) self.dmd = self.dataroot - def getDmdObj(self, path): - """return an object based on a path starting from the dmd""" - return getObjByPath(self.app, self.options.dataroot+path) - + """Return an object based on a path starting from the dmd""" + return getObjByPath(self.app, self.options.zodb_dataroot + path) def findDevice(self, name): - """return a device based on its FQDN""" + """Return a device based on its FQDN""" devices = self.dataroot.getDmdRoot("Devices") return devices.findDevice(name) - def buildOptions(self): - """basic options setup sub classes can add more options here""" + """Basic options setup sub classes can add more options here""" CmdBase.buildOptions(self) connectionFactory = getUtility(IZodbFactoryLookup).get() diff --git a/Products/ZenUtils/ZodbFactory.py b/Products/ZenUtils/ZodbFactory.py index 1ec780b0e0..1ee8f9bfa1 100644 --- a/Products/ZenUtils/ZodbFactory.py +++ b/Products/ZenUtils/ZodbFactory.py @@ -1,38 +1,41 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2011, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
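The ZCmdBase and ZenScriptBase hunks above switch to package-relative imports and read the data root from `options.zodb_dataroot` rather than `options.dataroot`; `connect()`, `login()`, and `getDataRoot()` otherwise keep their existing behaviour. A hedged usage sketch, not part of the patch: the `ListDevices` class and its `run()` method are invented for illustration, and it assumes `connect=True` still triggers `connect()` inside `__init__` as before.

# Hypothetical script built on the refactored ZenScriptBase -- sketch only.
from __future__ import print_function

from Products.ZenUtils.ZenScriptBase import ZenScriptBase


class ListDevices(ZenScriptBase):
    """Print the id of every device under the dmd."""

    def run(self):
        # self.dmd is set by getDataRoot() using options.zodb_dataroot.
        for device in self.dmd.Devices.getSubDevices():
            print(device.id)


if __name__ == "__main__":
    ListDevices(connect=True).run()  # opens ZODB and logs in as "admin"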
-# +# ############################################################################## +from zope.interface import implementer, Interface +from zope.component import queryUtility + +from .GlobalConfig import globalConfToDict -__doc__="""ZodbConnection -""" -from Products.ZenUtils.GlobalConfig import globalConfToDict -from zope.interface import Interface -from zope.interface import implements -from zope.component import queryUtility class IZodbFactoryLookup(Interface): def get(name=None): - """Return the a ZODB connection Factory by name or look up in global.conf.""" + """ + Return the a ZODB connection Factory by name or look up in + global.conf. + """ + +@implementer(IZodbFactoryLookup) class ZodbFactoryLookup(object): - implements(IZodbFactoryLookup) def get(self, name=None): - """Return the ZODB connection factory by name or look up in global.conf.""" + """ + Return the ZODB connection factory by name or look up in global.conf. + """ if name is None: settings = globalConfToDict() - name = settings.get('zodb-db-type', 'mysql') + name = settings.get("zodb-db-type", "mysql") connectionFactory = queryUtility(IZodbFactory, name) return connectionFactory class IZodbFactory(Interface): - def getZopeZodbConf(): """Return a zope.conf style stanza for the zodb connection.""" diff --git a/Products/ZenUtils/__init__.py b/Products/ZenUtils/__init__.py old mode 100755 new mode 100644 diff --git a/Products/ZenUtils/captureReplay.py b/Products/ZenUtils/captureReplay.py deleted file mode 100644 index 4fcdee5a40..0000000000 --- a/Products/ZenUtils/captureReplay.py +++ /dev/null @@ -1,204 +0,0 @@ -#! /usr/bin/env python -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - - -__doc__ = """captureReplay - Common code to capture and replay packets. - - To use: -1. Add the captureReplay mixin to the list of base classes - -2. Add the following to the buildOptions method of the base class, after other - initialization: - captureReplay.buildOptions() - -3. Add the following to the __init__ of the base class, before any other - option processing: - self.processCaptureReplayOptions() - -4. Define a convertPacketToPython() method to convert a 'raw' packet into a - Python serializable object. - -5. Add a call to the capturePacket() method to capture the packet. - -6. Define a replay() method to replay the packet. -""" - -import sys -import cPickle -from exceptions import EOFError, IOError -import glob - -from twisted.internet import defer, reactor -from Products.ZenUtils.Timeout import timeout -from Products.ZenEvents.ZenEventClasses import Error, Warning, Info, \ - Debug - -from twisted.python import failure - -class FakePacket(object): - """ - A fake object to make packet replaying feasible. - """ - def __init__(self): - self.fake = True - - -class CaptureReplay(object): - """ - Base class for packet capture and replay capability. - Assumes that the other classes provide the following: - self.buildOptions() - self.sendEvent() - - Overrides the self.connected() method if called to replay packets. - """ - - def processCaptureReplayOptions(self): - """ - Inside of the initializing class, call these functions first. 
- """ - if self.options.captureFilePrefix and len(self.options.replayFilePrefix) > 0: - self.log.error( "Can't specify both --captureFilePrefix and -replayFilePrefix" - " at the same time. Exiting" ) - sys.exit(1) - - if self.options.captureFilePrefix and not self.options.captureAll and \ - self.options.captureIps == '': - self.log.warn( "Must specify either --captureIps or --captureAll for" - " --capturePrefix to take effect. Ignoring option --capturePrefix" ) - - if len(self.options.replayFilePrefix) > 0: - self.connected = self.replayAll - return - - self.captureSerialNum = 0 - self.captureIps = self.options.captureIps.split(',') - - def convertPacketToPython(*packetInfo): - """ - Convert arguments into an plain object (no functions) suitable - for pickling. - """ - pass - - def capturePacket(self, hostname, *packetInfo): - """ - Store the raw packet for later examination and troubleshooting. - - @param hostname: packet-sending host's name or IP address - @type hostname: string - @param packetInfo: raw packet and other necessary arguments - @type packetInfo: args - """ - # Save the raw data if requested to do so - if not self.options.captureFilePrefix: - return - if not self.options.captureAll and hostname not in self.captureIps: - self.log.debug( "Received packet from %s, but not in %s", hostname, - self.captureIps) - return - - self.log.debug( "Capturing packet from %s", hostname ) - name = "%s-%s-%d" % (self.options.captureFilePrefix, hostname, self.captureSerialNum) - try: - packet = self.convertPacketToPython(*packetInfo) - capFile = open( name, "wb") - data= cPickle.dumps(packet, cPickle.HIGHEST_PROTOCOL) - capFile.write(data) - capFile.close() - self.captureSerialNum += 1 - except Exception: - self.log.exception("Couldn't write capture data to '%s'", name ) - - def replayAll(self): - """ - Replay all captured packets using the files specified in - the --replayFilePrefix option and then exit. - - Note that this calls the Twisted stop() method - """ - if hasattr(self, 'configure'): - d = self.configure() - d.addCallback(self._replayAll) - else: - self._replayAll() - - def _replayAll(self, ignored=None): - # Note what you are about to see below is a direct result of optparse - # adding in the arguments *TWICE* each time --replayFilePrefix is used. - files = [] - for filespec in self.options.replayFilePrefix: - files += glob.glob( filespec + '*' ) - - self.loaded = 0 - self.replayed = 0 - for file in set(files): - self.log.debug( "Attempting to read packet data from '%s'", file ) - try: - fp = open( file, "rb" ) - packet= cPickle.load(fp) - fp.close() - self.loaded += 1 - - except (IOError, EOFError): - fp.close() - self.log.exception( "Unable to load packet data from %s", file ) - continue - - self.log.debug("Calling application-specific replay() method") - self.replay(packet) - - self.replayStop() - - def replay(self, packet): - """ - Replay a captured packet. This must be overridden. - - @param packet: raw packet - @type packet: binary - """ - pass - - def replayStop(self): - """ - Twisted method that we use to override the default stop() method - for when we are replaying packets. This version waits to make - sure that all of our deferreds have exited before pulling the plug. 
- """ - if self.replayed == self.loaded: - self.log.info("Loaded and replayed %d packets", self.replayed) - reactor.stop() - else: - reactor.callLater(1, self.replayStop) - - def buildCaptureReplayOptions(self, parser): - """ - This should be called explicitly in the base class' buildOptions - """ - parser.add_option('--captureFilePrefix', - dest='captureFilePrefix', - default=None, - help="Directory and filename to use as a template" + \ - " to store captured raw trap packets.") - parser.add_option('--captureAll', - dest='captureAll', - action='store_true', - default=False, - help="Capture all packets.") - parser.add_option('--captureIps', - dest='captureIps', - default='', - help="Comma-separated list of IP addresses to capture.") - parser.add_option('--replayFilePrefix', - dest='replayFilePrefix', - action='append', - default=[], - help="Filename prefix containing captured packet data. Can specify more than once.") diff --git a/Products/ZenUtils/config.py b/Products/ZenUtils/config.py index 833ce238fe..859874f819 100644 --- a/Products/ZenUtils/config.py +++ b/Products/ZenUtils/config.py @@ -1,51 +1,54 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -__doc__ = """ +""" Zenoss config parsers. -There are mutiple stages to config parsing. Parsing is split into stages so that -we can validate a whole config file and possibly rebuild it to correct errors. +There are mutiple stages to config parsing. Parsing is split into stages so +that we can validate a whole config file and possibly rebuild it to correct +errors. The stages are: -* Parse - Split the config file in to ConfigLine types while maintaining line order, comments, and new lines +* Parse - Split the config file in to ConfigLine types while maintaining line + order, comments, and new lines * Validate - Check that all lines are valid * Report - Investigate why a line might be invalid (ex: invalid key format) * Load - Get a config object back * Write - An optional stage to write the config back to a file """ +from __future__ import absolute_import, print_function + import re +import six + + class ConfigError(Exception): - """ - Error for problems parsing config files. - """ - pass + """Error for problems parsing config files.""" + class ConfigLineError(ConfigError): - """ - Error for problems parsing config files with line context. - """ + """Error for problems parsing config files with line context.""" + def __init__(self, message, lineno): super(ConfigLineError, self).__init__(message) self.lineno = lineno def __str__(self): - return '%s on line %d' % (self.message, self.lineno) + return "%s on line %d" % (self.message, self.lineno) + class ConfigErrors(ConfigError): - """ - A group of errors while parsing config. - """ + """A group of errors while parsing config.""" + def __init__(self, message, errors): super(ConfigErrors, self).__init__(message) self.errors = errors @@ -55,14 +58,17 @@ def __str__(self): for error in self.errors: output.append(str(error)) - return '\n - '.join(output) + return "\n - ".join(output) + class InvalidKey(ConfigError): pass + class ConfigLineKeyError(ConfigLineError): pass + class Config(dict): """ A bunch of configuration settings. 
Uses dictionary access, @@ -70,6 +76,7 @@ class Config(dict): Provides some Convenience functions for different types. """ + def __getattr__(self, attr): return self[attr] @@ -82,7 +89,7 @@ def getbool(self, key, default=None): If key doesn't exist, returns `default`. """ try: - return self[key].lower() in ('true', 'yes', 'y', '1') + return self[key].lower() in ("true", "yes", "y", "1") except KeyError: return default @@ -112,10 +119,10 @@ def getfloat(self, key, default=None): except (KeyError, ValueError): return default + class ConfigLine(object): - """ - Abstract class that represents a single line in the config. - """ + """Abstract class that represents a single line in the config.""" + def __init__(self, line): self.line = line @@ -126,52 +133,48 @@ def __str__(self): def setting(self): """ Return a key, value tuple if this line represents a setting. - Implemented in base classes. """ - return None + return NotImplemented @classmethod def parse(cls, line): """ - Returns an instance of cls if this class can parse this line. Otherwise returns None. - Implemented in base classes. + Returns an instance of cls if this class can parse this line. + Otherwise returns None. """ - return None + return NotImplemented @classmethod def checkError(cls, line, lineno): """ - Checks the string for possible matches, considers why it doesn't match exactly if it's close - and returns a ConfigLineError. - Implemented in base classes. + Checks the string for possible matches, considers why it doesn't match + exactly if it's close and returns a ConfigLineError. """ - return None + return NotImplemented + class SettingLine(ConfigLine): - """ - Represents a config line with a `key = value` pair. - """ - _regexp = re.compile(r'^(?P[a-z]+([a-z\d_]|-[a-z\d_])*)\s*(?P(=|:|\s)*)\s*(?P.*)$', re.I) + """Represents a config line with a `key = value` pair.""" + + _regexp = re.compile( + r"^(?P[a-z]+([a-z\d_]|-[a-z\d_])*)" # key + r"\s*(?P(=|:|\s)*)" # delimiter + r"\s*(?P.*)$", # value + re.I, + ) - def __init__(self, key, value=None, delim='='): + def __init__(self, key, value=None, delim="="): self.key = key self.value = value self.delim = delim + def __str__(self): + return "{key} {delim} {value}".format(**self.__dict__) + @property def setting(self): return self.key, self.value - def __str__(self): - return '{key} {delim} {value}'.format(**self.__dict__) - - @classmethod - def checkError(cls, line, lineno): - match = re.match(r'^(?P.+?)\s*(?P(=|:|\s)+)\s*(?P.+)$', line, re.I) - if match and not cls._regexp.match(line): - return ConfigLineKeyError('Invalid key "%s"' % match.groupdict()['key'], lineno) - - @classmethod def parse(cls, line): match = cls._regexp.match(line) @@ -179,33 +182,76 @@ def parse(cls, line): data = match.groupdict() return cls(**data) + @classmethod + def checkError(cls, line, lineno): + match = re.match( + r"^(?P.+?)\s*(?P(=|:|\s)+)\s*(?P.+)$", + line, + re.I, + ) + if match and not cls._regexp.match(line): + return ConfigLineKeyError( + 'Invalid key "%s"' % match.groupdict()["key"], lineno + ) + + class CommentLine(ConfigLine): + @property + def setting(self): + return None + @classmethod def parse(cls, line): - if line.startswith('#'): + if line.startswith("#"): return cls(line[1:].strip()) + @classmethod + def checkError(cls, line, lineno): + return None + def __str__(self): - return '# %s' % self.line + return "# %s" % self.line + class EmptyLine(ConfigLine): def __init__(self): pass + @property + def setting(self): + return None + @classmethod def parse(cls, line): - if line == '': + 
if line == "": return cls() + @classmethod + def checkError(cls, line, lineno): + return None + def __str__(self): - return '' + return "" + class InvalidLine(ConfigLine): """ Default line if no other ConfigLines matched. Assumed to be invalid input. """ - pass + + @property + def setting(self): + return None + + @classmethod + def parse(cls, line): + return None + + @classmethod + def checkError(cls, line, lineno): + return None + class ConfigFile(object): """ @@ -236,7 +282,9 @@ def __init__(self, file): @param file file-like-object """ self.file = file - self.filename = self.file.name if hasattr(self.file, 'name') else 'Unknown' + self.filename = ( + self.file.name if hasattr(self.file, "name") else "Unknown" + ) self._lines = None def _parseLine(self, line): @@ -248,7 +296,6 @@ def _parseLine(self, line): return self._invalidLineType(cleanedLine) - def _checkLine(self, line, lineno): cleanedLine = line.strip() for type in self._lineTypes: @@ -259,8 +306,8 @@ def _checkLine(self, line, lineno): def parse(self): """ Parse a config file which has key-value pairs.Returns a list of config - line information. This line information can be used to accuratly recreate - the config without losing comments or invalid data. + line information. This line information can be used to accuratly + recreate the config without losing comments or invalid data. """ if self._lines is None: self._lines = [] @@ -277,7 +324,7 @@ def write(self, file): @param file file-like-object """ for line in self: - file.write(str(line) + '\n') + file.write(str(line) + "\n") def validate(self): """ @@ -293,10 +340,18 @@ def validate(self): if error: errors.append(error) else: - errors.append(ConfigLineError('Unexpected config line "%s"' % line.line, lineno + 1)) + errors.append( + ConfigLineError( + 'Unexpected config line "%s"' % line.line, + lineno + 1, + ) + ) if errors: - raise ConfigErrors('There were errors parsing the config "%s".' % self.filename, errors) + raise ConfigErrors( + 'There were errors parsing the config "%s".' % self.filename, + errors, + ) def __iter__(self): for line in self.parse(): @@ -307,58 +362,54 @@ def items(self): if line.setting: yield line.setting -class Parser(object): - def __call__(self, file): - configFile = ConfigFile(file) - configFile.validate() - return configFile.items() + +def _parse(file): + configFile = ConfigFile(file) + configFile.validate() + return configFile.items() class ConfigLoader(object): - """ - Lazily load the config when requested. - """ - def __init__(self, config_files, config=Config, parser=Parser()): + """Lazily load the config when requested.""" + + def __init__(self, config_files, config=Config): """ - @param config Config The config instance or class to load data into. Must support update which accepts an iterable of (key, value). - @param parser Parser The parser to use to parse the config files. Must be a callable and return an iterable of (key, value). - @param config_files list A list of config file names to parse in order. + :param config Config The config instance or class to load data into. + Must support update which accepts an iterable of (key, value). + :param config_files list A list of config file names to + parse in order. 
""" if not isinstance(config_files, list): config_files = [config_files] self.config_files = config_files - self.parser = parser self.config = config self._config = None def load(self): - """ - Load the config_files into an instance of config_class - """ + """Load the config_files into an instance of config_class.""" if isinstance(self.config, type): self._config = self.config() else: self._config = self.config if not self.config_files: - raise ConfigError('Config loader has no config files to load.') + raise ConfigError("Config loader has no config files to load.") for file in self.config_files: - if not hasattr(file, 'read') and isinstance(file, basestring): - # Look like a file name, open it - with open(file, 'r') as fp: - options = self.parser(fp) + if isinstance(file, six.string_types): + # It's a string, so open it first + with open(file, "r") as fp: + options = _parse(fp) else: - options = self.parser(file) - - self._config.update(options) + # Assume it's an open file + options = _parse(file) + for k, v in options: + self._config[k] = v def __call__(self): - """ - Lazily load the config file. - """ + """Lazily load the config file.""" if self._config is None: self.load() diff --git a/Products/ZenUtils/configlog.py b/Products/ZenUtils/configlog.py index ccc2ca0c40..2999b8544b 100644 --- a/Products/ZenUtils/configlog.py +++ b/Products/ZenUtils/configlog.py @@ -109,11 +109,8 @@ def addLogsFromConfigFile(fname, configDefaults=None): log.exception('Problem loading log configuration file: %s', fname) return False - # critical section - logging._acquireLock() try: - logging._handlers.clear() - del logging._handlerList[:] + logging._acquireLock() # Handlers add themselves to logging._handlers handlers = logging.config._install_handlers(cp, formatters) _zen_install_loggers(cp, handlers) diff --git a/Products/ZenUtils/controlplane/__init__.py b/Products/ZenUtils/controlplane/__init__.py index e05650c31c..cf719a25cf 100644 --- a/Products/ZenUtils/controlplane/__init__.py +++ b/Products/ZenUtils/controlplane/__init__.py @@ -7,12 +7,26 @@ # ############################################################################## -from .data import * -from .client import * -from servicetree import ServiceTree -from Products.ZenUtils.GlobalConfig import globalConfToDict +from __future__ import absolute_import, print_function + import os +from Products.ZenUtils.GlobalConfig import globalConfToDict + +from .data import ( + Host, + HostFactory, + ServiceDefinition, + ServiceDefinitionFactory, + ServiceInstance, + ServiceInstanceFactory, + ServiceJsonDecoder, + ServiceJsonEncoder, +) +from .client import ControlPlaneClient, ControlCenterError +from .environment import configuration +from .servicetree import ServiceTree + def getConnectionSettings(options=None): if options is None: @@ -22,10 +36,29 @@ def getConnectionSettings(options=None): settings = { "user": o.get("controlplane-user", "zenoss"), "password": o.get("controlplane-password", "zenoss"), - } + } # allow these to be set from the global.conf for development but # give preference to the environment variables - settings["user"] = os.environ.get('CONTROLPLANE_SYSTEM_USER', settings['user']) - settings["password"] = os.environ.get('CONTROLPLANE_SYSTEM_PASSWORD', settings['password']) + settings["user"] = os.environ.get( + "CONTROLPLANE_SYSTEM_USER", settings["user"] + ) + settings["password"] = os.environ.get( + "CONTROLPLANE_SYSTEM_PASSWORD", settings["password"] + ) return settings + +__all__ = ( + "ControlCenterError", + "ControlPlaneClient", + 
"Host", + "HostFactory", + "ServiceDefinition", + "ServiceDefinitionFactory", + "ServiceInstance", + "ServiceInstanceFactory", + "ServiceJsonDecoder", + "ServiceJsonEncoder", + "ServiceTree", + "configuration", +) diff --git a/Products/ZenUtils/controlplane/application.py b/Products/ZenUtils/controlplane/application.py index 56be6f668f..8e6f253ed8 100644 --- a/Products/ZenUtils/controlplane/application.py +++ b/Products/ZenUtils/controlplane/application.py @@ -12,42 +12,46 @@ """ import logging -import os import re import time + +from collections import Sequence, Iterator from fnmatch import fnmatch from functools import wraps -from Products.ZenUtils.controlplane import getConnectionSettings -from collections import Sequence, Iterator from zope.interface import implementer +import six + from Products.ZenUtils.application import ( - IApplicationManager, IApplication, - IApplicationLog, IApplicationConfiguration + IApplication, + IApplicationConfiguration, + IApplicationLog, + IApplicationManager, ) +from Products.ZenUtils.controlplane import getConnectionSettings from .client import ControlPlaneClient, ControlCenterError +from .environment import configuration as cc_config from .runstates import RunStates LOG = logging.getLogger("zen.controlplane") -_TENANT_ID_ENV = "CONTROLPLANE_TENANT_ID" MEM_MULTIPLIER = { - 'k':1024, - 'm':1024*1024, - 'g':1024*1024*1024, - 't':1024*1024*1024*1024 + "k": 1024, + "m": 1024 * 1024, + "g": 1024 * 1024 * 1024, + "t": 1024 * 1024 * 1024 * 1024, } def getTenantId(): - """Returns the tenant ID from the environment. - """ - tid = os.environ.get(_TENANT_ID_ENV) - if tid is None: + """Returns the tenant ID from the environment.""" + tid = cc_config.tenant_id + if not tid: LOG.error( "ERROR: Could not determine the tenantID from the environment" ) + return None return tid @@ -57,21 +61,21 @@ def _search(services, params): """ if "name" in params: namepat = params["name"] - services = ( - svc for svc in services if fnmatch(svc.name, namepat) - ) + services = (svc for svc in services if fnmatch(svc.name, namepat)) if "tags" in params: tags = set(params["tags"]) - includes = set(t for t in tags if not t.startswith('-')) - excludes = set(t[1:] for t in tags if t.startswith('-')) + includes = set(t for t in tags if not t.startswith("-")) + excludes = set(t[1:] for t in tags if t.startswith("-")) if includes: services = ( - svc for svc in services + svc + for svc in services if svc.tags and (set(svc.tags) & includes == includes) ) if excludes: services = ( - svc for svc in services + svc + for svc in services if not svc.tags or excludes.isdisjoint(set(svc.tags)) ) return services @@ -90,9 +94,9 @@ def _search(services, params): # removed services and/or implement this cache using redis or memcached (or # something else equivalent) and have it shared among all the Zopes. + class _Cache(object): - """Cache for ServiceDefinition objects. - """ + """Cache for ServiceDefinition objects.""" def __init__(self, client, ttl=60): """Initialize an instance of _Cache. @@ -106,8 +110,7 @@ def __init__(self, client, ttl=60): self._ttl = ttl def _load(self): - """Load all the data into the cache. - """ + """Load all the data into the cache.""" tenant_id = getTenantId() if tenant_id is None: self._data = None @@ -117,8 +120,7 @@ def _load(self): self._lastUpdate = time.time() def _refresh(self): - """Update the cache with changes. 
- """ + """Update the cache with changes.""" since = int((time.time() - self._lastUpdate) * 1000) # No refresh if no time has elapsed since the last update if since == 0: @@ -130,9 +132,11 @@ def _refresh(self): for changed_svc in changes: idx = next( ( - idx for idx, svc in enumerate(self._data) + idx + for idx, svc in enumerate(self._data) if svc.id == changed_svc.id - ), None + ), + None, ) if idx is not None: # Update existing service in cache @@ -142,8 +146,7 @@ def _refresh(self): self._data.append(changed_svc) def get(self): - """Return the cached data. - """ + """Return the cached data.""" if not self.__nonzero__(): self._load() else: @@ -151,14 +154,12 @@ def get(self): return self._data def clear(self): - """Clear the cache. - """ + """Clear the cache.""" self._data = None self._lastUpdate = 0 def __nonzero__(self): - """Return True if there is cached data. - """ + """Return True if there is cached data.""" age = int(time.time() - self._lastUpdate) return age < self._ttl and self._data is not None @@ -196,7 +197,7 @@ def query(self, name=None, tags=None, monitorName=None): if name: params["name"] = name if tags: - if isinstance(tags, (str, unicode)): + if isinstance(tags, six.string_types): tags = [tags] params["tags"] = tags @@ -209,17 +210,12 @@ def query(self, name=None, tags=None, monitorName=None): # applications. tags = set(tags) - set(["daemon"]) tags.add("-daemon") - params = { - "name": monitorName, - "tags": list(tags) - } + params = {"name": monitorName, "tags": list(tags)} parent = next(_search(services, params), None) # If the monitor name wasn't found, return an empty sequence. if not parent: return () - result = ( - svc for svc in result if svc.parentId == parent.id - ) + result = (svc for svc in result if svc.parentId == parent.id) return tuple(self._getApp(service) for service in result) @@ -229,8 +225,7 @@ def get(self, id, default=None): The default argument is returned if the application doesn't exist. """ service = next( - (svc for svc in self._servicecache.get() if svc.id == id), - None + (svc for svc in self._servicecache.get() if svc.id == id), None ) if not service: return default @@ -249,8 +244,9 @@ class DeployedApp(object): """ Control and interact with the deployed app via the control plane. 
""" + UNKNOWN_STATUS = type( - 'SENTINEL', (object,), {'__nonzero__': lambda x: False} + "SENTINEL", (object,), {"__nonzero__": lambda x: False} )() def __init__(self, service, client, runstate): @@ -263,11 +259,13 @@ def _initStatus(fn): """ Decorator which calls updateStatus if status is uninitialized """ + @wraps(fn) def wrapper(self, *args, **kwargs): if self._status == DeployedApp.UNKNOWN_STATUS: self.updateStatus(*args, **kwargs) return fn(self) + return wrapper def updateStatus(self): @@ -344,15 +342,17 @@ def autostart(self): @autostart.setter def autostart(self, value): - value = self._service.LAUNCH_MODE.AUTO \ - if bool(value) else self._service.LAUNCH_MODE.MANUAL + value = ( + self._service.LAUNCH_MODE.AUTO + if bool(value) + else self._service.LAUNCH_MODE.MANUAL + ) self._service.launch = value self._client.updateServiceProperty(self._service, "Launch") @property def configurations(self): - """ - """ + """ """ return _DeployedAppConfigList(self._service, self._client) @configurations.setter @@ -404,16 +404,13 @@ def restart(self): if priorState != self._runstate.state: LOG.info("[%x] RESTARTING APP", id(self)) if self._status: - self._client.killInstance( - self._status.hostId, self._status.id - ) + self._client.killInstance(self._status.hostId, self._status.id) else: self._service.desiredState = self._service.STATE.RUN self._client.startService(self._service.id) def update(self): - """ - """ + """ """ self._client.updateService(self._service) @property @@ -422,9 +419,16 @@ def RAMCommitment(self): Get the RAM Commitment of the service and trasform it in the byte value. RAMCommitment: string in form """ - match = re.search("(?P[0-9]*\.?[0-9]*)(?P[k,m,g,t]+)", self._service.RAMCommitment, re.IGNORECASE) - if not match : return - RAMCommitment_bytes = int(match.group('value')) * MEM_MULTIPLIER.get(match.group('unit').lower()) + match = re.search( + "(?P[0-9]*\.?[0-9]*)(?P[k,m,g,t]+)", + self._service.RAMCommitment, + re.IGNORECASE, + ) + if not match: + return + RAMCommitment_bytes = int(match.group("value")) * MEM_MULTIPLIER.get( + match.group("unit").lower() + ) return RAMCommitment_bytes @@ -438,7 +442,9 @@ class _DeployedAppConfigList(Sequence): def __init__(self, service, client): self._service = service if not service._data.has_key("ConfigFiles"): - service._data["ConfigFiles"] = client.getService(service.id)._data["ConfigFiles"] + service._data["ConfigFiles"] = client.getService(service.id)._data[ + "ConfigFiles" + ] self._client = client def __getitem__(self, index): @@ -484,13 +490,13 @@ def __iter__(self): def next(self): return DeployedAppConfig( - self._service, self._client, self._iter.next()) + self._service, self._client, self._iter.next() + ) @implementer(IApplicationConfiguration) class DeployedAppConfig(object): - """ - """ + """ """ def __init__(self, service, client, config): self._service = service @@ -514,8 +520,7 @@ def content(self, content): @implementer(IApplicationLog) class DeployedAppLog(object): - """ - """ + """ """ def __init__(self, instance, client): self._status = instance @@ -534,5 +539,8 @@ def last(self, count): __all__ = ( - "DeployedApp", "DeployedAppConfig", "DeployedAppLog", "DeployedAppLookup" + "DeployedApp", + "DeployedAppConfig", + "DeployedAppLog", + "DeployedAppLookup", ) diff --git a/Products/ZenUtils/controlplane/client.py b/Products/ZenUtils/controlplane/client.py index 53c9be27ee..9f1c1db0a0 100644 --- a/Products/ZenUtils/controlplane/client.py +++ b/Products/ZenUtils/controlplane/client.py @@ -10,10 +10,10 @@ """ 
ControlPlaneClient """ + import fnmatch import json import logging -import os import urllib import urllib2 @@ -22,30 +22,35 @@ from errno import ECONNRESET from urlparse import urlunparse -from .data import (ServiceJsonDecoder, ServiceJsonEncoder, HostJsonDecoder, - ServiceStatusJsonDecoder, InstanceV2ToServiceStatusJsonDecoder) +import six +from .data import ( + HostJsonDecoder, + InstanceV2ToServiceStatusJsonDecoder, + ServiceJsonDecoder, + ServiceJsonEncoder, + ServiceStatusJsonDecoder, +) +from .environment import configuration as cc_config LOG = logging.getLogger("zen.controlplane.client") -SERVICED_VERSION_ENV = "SERVICED_VERSION" - - def getCCVersion(): """ Checks if the client is connecting to Hoth or newer. The cc version is injected in the containers by serviced """ - cc_version = os.environ.get(SERVICED_VERSION_ENV) - if cc_version: # CC is >= 1.2.0 + cc_version = cc_config.version + if cc_version: # CC is >= 1.2.0 LOG.debug("Detected CC version >= 1.2.0") else: cc_version = "1.1.X" return cc_version -class ControlCenterError(Exception): pass +class ControlCenterError(Exception): + pass class _Request(urllib2.Request): @@ -59,22 +64,23 @@ def __init__(self, *args, **kwargs): urllib2.Request.__init__(self, *args, **kwargs) def get_method(self): - return self.__method \ - if self.__method else urllib2.Request.get_method(self) + return ( + self.__method + if self.__method + else urllib2.Request.get_method(self) + ) class ControlPlaneClient(object): - """ - """ + """ """ def __init__(self, user, password, host=None, port=None): - """ - """ + """ """ self._cj = CookieJar() self._opener = urllib2.build_opener( urllib2.HTTPHandler(), urllib2.HTTPSHandler(), - urllib2.HTTPCookieProcessor(self._cj) + urllib2.HTTPCookieProcessor(self._cj), ) # Zproxy always provides a proxy to serviced on port 443 self._server = { @@ -95,7 +101,7 @@ def _checkUseHttps(self): """ use_https = True cc_master = self._server.get("host") - if self._hothOrNewer and cc_master in [ "localhost", "127.0.0.1" ]: + if self._hothOrNewer and cc_master in ["localhost", "127.0.0.1"]: use_https = False return use_https @@ -111,13 +117,13 @@ def queryServices(self, name=None, tags=None, tenantID=None): namepat = namepat.replace("\\Z", "\\z") query["name"] = namepat if tags: - if isinstance(tags, (str, unicode)): + if isinstance(tags, six.string_types): tags = [tags] - query["tags"] = ','.join(tags) + query["tags"] = ",".join(tags) if tenantID: query["tenantID"] = tenantID response = self._dorequest(self._servicesEndpoint, query=query) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() decoded = ServiceJsonDecoder().decode(body) if decoded is None: @@ -129,7 +135,7 @@ def getService(self, serviceId, default=None): Returns the ServiceDefinition object for the given service. 
""" response = self._dorequest("/services/%s" % serviceId) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return ServiceJsonDecoder().decode(body) @@ -143,7 +149,7 @@ def getChangesSince(self, age): """ query = {"since": age} response = self._dorequest(self._servicesEndpoint, query=query) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() decoded = ServiceJsonDecoder().decode(body) if decoded is None: @@ -159,12 +165,16 @@ def updateServiceProperty(self, service, prop): oldService = self.getService(service.id) oldService._data[prop] = service._data[prop] body = ServiceJsonEncoder().encode(oldService) - LOG.info("Updating prop '%s' for service '%s':%s resourceId=%s", prop, service.name, service.id, service.resourceId) - LOG.debug("Updating service %s", body) - response = self._dorequest( - service.resourceId, method="PUT", data=body + LOG.info( + "Updating prop '%s' for service '%s':%s resourceId=%s", + prop, + service.name, + service.id, + service.resourceId, ) - body = ''.join(response.readlines()) + LOG.debug("Updating service %s", body) + response = self._dorequest(service.resourceId, method="PUT", data=body) + body = "".join(response.readlines()) response.close() def updateService(self, service): @@ -176,10 +186,8 @@ def updateService(self, service): body = ServiceJsonEncoder().encode(service) LOG.info("Updating service '%s':%s", service.name, service.id) LOG.debug("Updating service %s", body) - response = self._dorequest( - service.resourceId, method="PUT", data=body - ) - body = ''.join(response.readlines()) + response = self._dorequest(service.resourceId, method="PUT", data=body) + body = "".join(response.readlines()) response.close() def startService(self, serviceId): @@ -189,9 +197,10 @@ def startService(self, serviceId): :param string ServiceId: The service to start """ LOG.info("Starting service '%s", serviceId) - response = self._dorequest("/services/%s/startService" % serviceId, - method='PUT') - body = ''.join(response.readlines()) + response = self._dorequest( + "/services/%s/startService" % serviceId, method="PUT" + ) + body = "".join(response.readlines()) response.close() return ServiceJsonDecoder().decode(body) @@ -202,9 +211,10 @@ def stopService(self, serviceId): :param string ServiceId: The service to stop """ LOG.info("Stopping service %s", serviceId) - response = self._dorequest("/services/%s/stopService" % serviceId, - method='PUT') - body = ''.join(response.readlines()) + response = self._dorequest( + "/services/%s/stopService" % serviceId, method="PUT" + ) + body = "".join(response.readlines()) response.close() return ServiceJsonDecoder().decode(body) @@ -220,7 +230,7 @@ def addService(self, serviceDefinition): response = self._dorequest( "/services/add", method="POST", data=serviceDefinition ) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return body @@ -231,9 +241,7 @@ def deleteService(self, serviceId): :param string serviceId: Id of the service to delete """ LOG.info("Removing service %s", serviceId) - response = self._dorequest( - "/services/%s" % serviceId, method="DELETE" - ) + response = self._dorequest("/services/%s" % serviceId, method="DELETE") response.close() def deployService(self, parentId, service): @@ -245,15 +253,12 @@ def deployService(self, parentId, service): :returns string: json encoded representation of new service's links """ LOG.info("Deploying service") - data = { - 'ParentID': parentId, - 
'Service': json.loads(service) - } + data = {"ParentID": parentId, "Service": json.loads(service)} LOG.debug(data) response = self._dorequest( "/services/deploy", method="POST", data=json.dumps(data) ) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return body @@ -262,11 +267,10 @@ def queryServiceInstances(self, serviceId): Returns a sequence of ServiceInstance objects. """ response = self._dorequest("/services/%s/running" % serviceId) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return ServiceJsonDecoder().decode(body) - def queryServiceStatus(self, serviceId): """ CC version-independent call to get the status of a service. @@ -299,7 +303,7 @@ def queryServiceStatusImpl(self, serviceId): :rtype: dict of ServiceStatus objects with ID as key """ response = self._dorequest("/services/%s/status" % serviceId) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() decoded = ServiceStatusJsonDecoder().decode(body) return decoded @@ -314,9 +318,10 @@ def queryServiceInstancesV2(self, serviceId): :returns: The raw result of the query :rtype: json formatted string """ - response = self._dorequest("%s/services/%s/instances" % (self._v2loc, - serviceId)) - body = ''.join(response.readlines()) + response = self._dorequest( + "%s/services/%s/instances" % (self._v2loc, serviceId) + ) + body = "".join(response.readlines()) response.close() return body @@ -336,13 +341,12 @@ def _convertInstancesV2ToStatuses(self, rawV2Instance): decoded = {instance.id: instance for instance in decoded} return decoded - def queryHosts(self): """ Returns a sequence of Host objects. """ response = self._dorequest("/hosts") - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return HostJsonDecoder().decode(body) @@ -350,8 +354,8 @@ def getHost(self, hostId): """ Returns a sequence of Host objects. 
""" - response = self._dorequest("/hosts/%" % hostId) - body = ''.join(response.readlines()) + response = self._dorequest("/hosts/%s" % hostId) + body = "".join(response.readlines()) response.close() return HostJsonDecoder().decode(body) @@ -362,54 +366,49 @@ def getInstance(self, serviceId, instanceId, default=None): response = self._dorequest( "/services/%s/running/%s" % (serviceId, instanceId) ) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return ServiceJsonDecoder().decode(body) def getServiceLog(self, serviceId, start=0, end=None): - """ - """ + """ """ response = self._dorequest("/services/%s/logs" % serviceId) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() log = json.loads(body) return log["Detail"] def getInstanceLog(self, serviceId, instanceId, start=0, end=None): - """ - """ + """ """ response = self._dorequest( "/services/%s/%s/logs" % (serviceId, instanceId) ) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() log = json.loads(body) return str(log["Detail"]) def killInstance(self, hostId, uuid): - """ - """ + """ """ response = self._dorequest( "/hosts/%s/%s" % (hostId, uuid), method="DELETE" ) response.close() def getServicesForMigration(self, serviceId): - """ - """ + """ """ query = {"includeChildren": "true"} response = self._dorequest("/services/%s" % serviceId, query=query) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return json.loads(body) def postServicesForMigration(self, data, serviceId): - """ - """ + """ """ response = self._dorequest( "/services/%s/migrate" % serviceId, method="POST", data=data ) - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return body @@ -418,7 +417,7 @@ def getPoolsData(self): Get all the pools and return raw json """ response = self._dorequest("/pools") - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return body @@ -427,7 +426,7 @@ def getHostsData(self): Get all the pools and return raw json """ response = self._dorequest("/hosts") - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return body @@ -435,16 +434,16 @@ def getRunningServicesData(self): """ Get all the running services and return raw json """ - body = '' + body = "" if not self._hothOrNewer: response = self._dorequest("/running") - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() else: hostsData = self.queryHosts() for hostID in hostsData: - response = self._dorequest("/hosts/%s/running" %hostID) - body = body + ''.join(response.readlines()) + response = self._dorequest("/hosts/%s/running" % hostID) + body = body + "".join(response.readlines()) response.close() return body @@ -453,14 +452,22 @@ def getStorageData(self): Get the storage information and return raw json """ response = self._dorequest("/storage") - body = ''.join(response.readlines()) + body = "".join(response.readlines()) response.close() return body def _makeRequest(self, uri, method=None, data=None, query=None): query = urllib.urlencode(query) if query else "" - url = urlunparse(("https" if self._useHttps else "http", - self._netloc, uri, "", query, "")) + url = urlunparse( + ( + "https" if self._useHttps else "http", + self._netloc, + uri, + "", + query, + "", + ) + ) args = {} if method: args["method"] = method @@ -481,7 +488,9 @@ def 
_login(self): def _dorequest(self, uri, method=None, data=None, query=None): # Try to perform the request up to five times for trycount in range(5): - request = self._makeRequest(uri, method=method, data=data, query=query) + request = self._makeRequest( + uri, method=method, data=data, query=query + ) try: return self._opener.open(request) except urllib2.HTTPError as ex: @@ -494,35 +503,39 @@ def _dorequest(self, uri, method=None, data=None, query=None): msg = json.load(ex) except ValueError: raise ex # This stinks because we lose the stack - detail = msg.get('Detail') + detail = msg.get("Detail") if not detail: raise detail = detail.replace("Internal Server Error: ", "") raise ControlCenterError(detail) raise - # The CC server resets the connection when an unauthenticated POST requesti is - # made. Depending on when during the request lifecycle the connection is reset, - # we can get either an URLError with a socket.error as the reason, or a naked - # socket.error. In either case, the socket.error.errno indicates that the - # connection was reset with an errno of ECONNRESET (104). - # When we get a connection reset exception, assume that the reset was caused - # by lack of authentication, login, and retry the request. + # The CC server resets the connection when an unauthenticated + # POST requesti is made. Depending on when during the request + # lifecycle the connection is reset, we can get either an + # URLError with a socket.error as the reason, or a naked + # socket.error. In either case, the socket.error.errno + # indicates that the connection was reset with an errno of + # ECONNRESET (104). When we get a connection reset exception, + # assume that the reset was caused by lack of authentication, + # login, and retry the request. except urllib2.URLError as ex: reason = ex.reason - if type(reason) == socket_error and reason.errno == ECONNRESET: + if ( + isinstance(reason, socket_error) + and reason.errno == ECONNRESET + ): self._login() continue raise except socket_error as ex: if ex.errno == ECONNRESET: - self._login() - continue + self._login() + continue raise else: # break the loop so we skip the loop's else clause break - else: # raises the last exception that was raised (the 401 error) raise @@ -550,19 +563,16 @@ def cookies(self): for cookie in self._get_cookie_jar(): cookies.append( { - 'name': cookie.name, - 'value': cookie.value, - 'domain': cookie.domain, - 'path': cookie.path, - 'expires': cookie.expires, - 'secure': cookie.discard + "name": cookie.name, + "value": cookie.value, + "domain": cookie.domain, + "path": cookie.path, + "expires": cookie.expires, + "secure": cookie.discard, } ) return cookies # Define the names to export via 'from client import *'. 
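# ---------------------------------------------------------------------------
# Reviewer sketch (not part of the patch): a minimal, self-contained version
# of the retry-and-relogin pattern that ControlPlaneClient._dorequest uses in
# the hunk above.  `open_request` and `login` are hypothetical stand-ins for
# self._opener.open(request) and self._login(); the real method additionally
# handles urllib2.HTTPError (401 -> relogin, 500 -> ControlCenterError) and
# unwraps urllib2.URLError to reach the underlying socket.error.
import socket
from errno import ECONNRESET


def request_with_relogin(open_request, login, attempts=5):
    """Retry a request, re-authenticating when the connection is reset."""
    for _ in range(attempts):
        try:
            return open_request()
        except socket.error as ex:
            # A reset connection is assumed to mean the session is not
            # authenticated; log in again and retry the same request.
            if ex.errno == ECONNRESET:
                login()
                continue
            raise
    # Analogue of the for/else in _dorequest: every attempt failed with a
    # reset, so surface an error rather than silently returning None.
    raise socket.error(ECONNRESET, "connection reset on every attempt")

# Re-logging in on a reset (rather than failing fast) matches the comment in
# the hunk above: the reset is treated as an authentication problem, not as a
# network fault.
# ---------------------------------------------------------------------------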
-__all__ = ( - "ControlPlaneClient", - "ControlCenterError" -) +__all__ = ("ControlPlaneClient", "ControlCenterError") diff --git a/Products/ZenUtils/controlplane/data.py b/Products/ZenUtils/controlplane/data.py index cabf4cf287..b974d276c4 100644 --- a/Products/ZenUtils/controlplane/data.py +++ b/Products/ZenUtils/controlplane/data.py @@ -221,7 +221,6 @@ def wrapper(*args, **kw): "stopped": ApplicationState.STOPPED, "started": ApplicationState.RUNNING, "pulling": ApplicationState.STARTING, - "resuming": ApplicationState.STARTING, "resumed": ApplicationState.RUNNING, "pending_restart": ApplicationState.STARTING, "emergency_stopping": ApplicationState.STOPPING, diff --git a/Products/ZenUtils/controlplane/environment.py b/Products/ZenUtils/controlplane/environment.py new file mode 100644 index 0000000000..4b07fd7347 --- /dev/null +++ b/Products/ZenUtils/controlplane/environment.py @@ -0,0 +1,126 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. +# +############################################################################## + +import os + +from backports.functools_lru_cache import lru_cache + + +class _EnviromentVariables(object): + _CONSUMER_URL = "CONTROLPLANE_CONSUMER_URL" + _HOST_ID = "CONTROLPLANE_HOST_ID" + _HOST_IPS = "CONTROLPLANE_HOST_IPS" + _IMAGE_ID = "SERVICED_SERVICE_IMAGE" + _INSTANCE_ID = "CONTROLPLANE_INSTANCE_ID" + _LOG_ADDRESS = "SERVICED_LOG_ADDRESS" + _MASTER_IP = "SERVICED_MASTER_IP" + _MAX_RPC_CLIENTS = "SERVICED_MAX_RPC_CLIENTS" + _MUX_PORT = "SERVICED_MUX_PORT" + _RPC_PORT = "SERVICED_RPC_PORT" + _RUN = "CONTROLPLANE" + _SERVICE_ID = "CONTROLPLANE_SERVICED_ID" + _SHELL = "SERVICED_IS_SERVICE_SHELL" + _TENANT_ID = "CONTROLPLANE_TENANT_ID" + _UI_PORT = "SERVICED_UI_PORT" + _VERSION = "SERVICED_VERSION" + _VIRTUAL_ADDRESS_SUBNET = "SERVICED_VIRTUAL_ADDRESS_SUBNET" + + @staticmethod + def _get(name): + return os.environ.get(name, "") + + @property + @lru_cache(maxsize=1) + def is_serviced(self): + return self._get(self._RUN) == "1" + + @property + @lru_cache(maxsize=1) + def is_serviced_shell(self): + return self._get(self._SHELL) == "true" + + @property + @lru_cache(maxsize=1) + def consumer_url(self): + return self._get(self._CONSUMER_URL) + + @property + @lru_cache(maxsize=1) + def host_id(self): + return self._get(self._HOST_ID) + + @property + @lru_cache(maxsize=1) + def instance_id(self): + return self._get(self._INSTANCE_ID) + + @property + @lru_cache(maxsize=1) + def service_id(self): + return self._get(self._SERVICE_ID) + + @property + @lru_cache(maxsize=1) + def tenant_id(self): + return self._get(self._TENANT_ID) + + @property + @lru_cache(maxsize=1) + def version(self): + return self._get(self._VERSION) + + @property + @lru_cache(maxsize=1) + def image_id(self): + return self._get(self._IMAGE_ID) + + @property + @lru_cache(maxsize=1) + def host_ips(self): + return tuple( + ip.strip() for ip in self._get(self._HOST_IPS).split(" ") if ip + ) + + @property + @lru_cache(maxsize=1) + def log_address(self): + return self._get(self._LOG_ADDRESS) + + @property + @lru_cache(maxsize=1) + def master_ip(self): + return self._get(self._MASTER_IP) + + @property + @lru_cache(maxsize=1) + def max_rpc_clients(self): + return self._get(self._MAX_RPC_CLIENTS) + + @property + @lru_cache(maxsize=1) + def mux_port(self): + return 
self._get(self._MUX_PORT) + + @property + @lru_cache(maxsize=1) + def rpc_port(self): + return self._get(self._RPC_PORT) + + @property + @lru_cache(maxsize=1) + def ui_port(self): + return self._get(self._UI_PORT) + + @property + @lru_cache(maxsize=1) + def virtual_address_subnet(self): + return self._get(self._VIRTUAL_ADDRESS_SUBNET) + + +configuration = _EnviromentVariables() diff --git a/Products/ZenUtils/cstat.py b/Products/ZenUtils/cstat.py old mode 100755 new mode 100644 diff --git a/Products/ZenUtils/metricwriter.py b/Products/ZenUtils/metricwriter.py index a14b98adec..a34d5ba8f5 100644 --- a/Products/ZenUtils/metricwriter.py +++ b/Products/ZenUtils/metricwriter.py @@ -30,13 +30,20 @@ def write_metric(self, metric, value, timestamp, tags): @return deferred: metric was published or queued """ try: - if tags and 'mtrace' in tags.keys(): - log.info("mtrace: publishing metric %s %s %s %s", - metric, value, timestamp, tags) - log.debug("publishing metric %s %s %s %s", metric, value, - timestamp, tags) - val = defer.maybeDeferred(self._publisher.put, metric, value, - timestamp, tags) + if tags and "mtrace" in tags.keys(): + log.info( + "mtrace: publishing metric %s %s %s %s", + metric, + value, + timestamp, + tags, + ) + log.debug( + "publishing metric %s %s %s %s", metric, value, timestamp, tags + ) + val = defer.maybeDeferred( + self._publisher.put, metric, value, timestamp, tags + ) self._datapoints += 1 return val except Exception as x: @@ -69,13 +76,24 @@ def write_metric(self, metric, value, timestamp, tags): """ try: if self._test_filter(metric, value, timestamp, tags): - if tags and 'mtrace' in tags.keys(): - log.info("mtrace: publishing metric %s %s %s %s", - metric, value, timestamp, tags) - log.debug("publishing metric %s %s %s %s", metric, value, - timestamp, tags) - val = defer.maybeDeferred(self._publisher.put, metric, value, - timestamp, tags) + if tags and "mtrace" in tags.keys(): + log.info( + "mtrace: publishing metric %s %s %s %s", + metric, + value, + timestamp, + tags, + ) + log.debug( + "publishing metric %s %s %s %s", + metric, + value, + timestamp, + tags, + ) + val = defer.maybeDeferred( + self._publisher.put, metric, value, timestamp, tags + ) self._datapoints += 1 return val except Exception as x: @@ -108,7 +126,11 @@ def write_metric(self, metric, value, timestamp, tags): dList = [] for writer in self._writers: try: - dList.append(defer.maybeDeferred(writer.write_metric, metric, value, timestamp, tags)) + dList.append( + defer.maybeDeferred( + writer.write_metric, metric, value, timestamp, tags + ) + ) except Exception as x: log.exception(x) self._datapoints += 1 @@ -127,7 +149,7 @@ class DerivativeTracker(object): def __init__(self): self._timed_metric_cache = {} - def derivative(self, name, timed_metric, min='U', max='U'): + def derivative(self, name, timed_metric, min="U", max="U"): """ Tracks a metric value over time and returns deltas @@ -148,8 +170,9 @@ def derivative(self, name, timed_metric, min='U', max='U'): # in an infinity/nan rate. return None else: - delta = float(timed_metric[0] - last_timed_metric[0]) / \ - float(timed_metric[1] - last_timed_metric[1]) + delta = float(timed_metric[0] - last_timed_metric[0]) / float( + timed_metric[1] - last_timed_metric[1] + ) # Get min/max into a usable float or None state. 
min, max = map(constraint_value, (min, max)) @@ -189,7 +212,7 @@ def constraint_value(value): elif isinstance(value, int): return float(value) elif isinstance(value, types.StringTypes): - if value in ('U', ''): + if value in ("U", ""): return None try: @@ -219,7 +242,15 @@ def updateThresholds(self, thresholds): self._thresholds.updateList(thresholds) @defer.inlineCallbacks - def notify(self, context_uuid, context_id, metric, timestamp, value, thresh_event_data={}): + def notify( + self, + context_uuid, + context_id, + metric, + timestamp, + value, + thresh_event_data=None, + ): """ Check the specified value against thresholds and send any generated events @@ -233,22 +264,24 @@ def notify(self, context_uuid, context_id, metric, timestamp, value, thresh_even @return: """ if self._thresholds and value is not None: - if 'eventKey' in thresh_event_data: - eventKeyPrefix = [thresh_event_data['eventKey']] + thresh_event_data = thresh_event_data or {} + if "eventKey" in thresh_event_data: + eventKeyPrefix = [thresh_event_data["eventKey"]] else: eventKeyPrefix = [metric] - for ev in self._thresholds.check(context_uuid, metric, timestamp, value): + for ev in self._thresholds.check( + context_uuid, metric, timestamp, value + ): parts = eventKeyPrefix[:] - if 'eventKey' in ev: - parts.append(ev['eventKey']) - ev['eventKey'] = '|'.join(parts) + if "eventKey" in ev: + parts.append(ev["eventKey"]) + ev["eventKey"] = "|".join(parts) # add any additional values for this threshold # (only update if key is not in event, or if # the event's value is blank or None) for key, value in thresh_event_data.items(): - if ev.get(key, None) in ('', None): + if ev.get(key, None) in ("", None): ev[key] = value if ev.get("component", None): - ev['component_guid'] = context_uuid + ev["component_guid"] = context_uuid yield defer.maybeDeferred(self._send_callback, ev) - diff --git a/Products/ZenUtils/patches/mysqladaptermonkey.py b/Products/ZenUtils/patches/mysqladaptermonkey.py index 0d67f7cdab..1e9c9825ff 100644 --- a/Products/ZenUtils/patches/mysqladaptermonkey.py +++ b/Products/ZenUtils/patches/mysqladaptermonkey.py @@ -88,7 +88,7 @@ def open(self, *args, **kwargs): return conn, cursor @monkeypatch('relstorage.adapters.mysql.connmanager.MySQLdbConnectionManager') - def close(self,conn,cursor): + def close(self, conn=None, cursor=None): try: if conn is not None and cursor is not None: sql = "DELETE FROM connection_info WHERE connection_id = connection_id();" @@ -96,7 +96,7 @@ def close(self,conn,cursor): conn.commit() except Exception: pass - original(self, conn, cursor) + original(self, conn=conn, cursor=cursor) @monkeypatch('relstorage.adapters.mysql.schema.MySQLSchemaInstaller') def create(self, cursor): diff --git a/Products/ZenUtils/patches/zodbpackmonkey.py b/Products/ZenUtils/patches/zodbpackmonkey.py index 1529c50ca3..3fbcabaa99 100644 --- a/Products/ZenUtils/patches/zodbpackmonkey.py +++ b/Products/ZenUtils/patches/zodbpackmonkey.py @@ -621,7 +621,9 @@ def pack(self, pack_tid, packed_func=None): prevent_pke_oids = self.remove_connected_oids(conn, cursor, grouped_oids, packed_func, total, oids_processed) - self._pack_cleanup(conn, cursor) + store_connection = PrePackConnection(self.connmanager) + + self._pack_cleanup(store_connection=store_connection) try: if skipped_oids: @@ -640,6 +642,98 @@ def pack(self, pack_tid, packed_func=None): finally: self.connmanager.close(conn, cursor) + @monkeypatch('relstorage.adapters.packundo.HistoryFreePackUndo') + def _add_refs_for_oids(self, load_batcher, store_batcher, oids, 
get_references): + + """ + Fill object_refs with the states for some objects. + + Returns the number of references added. + """ + # oids should be a slice of an ``OidList``, which may be an + # ``array.array``; those are relatively slow to iterate. + + # The batcher always does deletes before inserts, which is + # exactly what we want. + # In the past, we performed all deletes and then all inserts; + # now, things to batching, they could be interleaved, but + # because we process OID-by-OID, that should be fine. + # In the past, we also DELETED from object_refs_added and object_ref + # everything found in the ``oids`` parameter; now we only do a delete if + # we get back a row from object_state; again, that shouldn't matter, rows + # should be found in object_state. + object_ref_schema = store_batcher.row_schema_of_length(3) + object_refs_added_schema = store_batcher.row_schema_of_length(2) + + # Use the batcher to get efficient ``= ANY()`` + # queries, but go ahead and collect into a list at once + rows = list(load_batcher.select_from( + ('zoid', 'tid', 'state'), + 'object_state', + suffix=' ORDER BY zoid ', + zoid=oids + )) + + num_refs_found = 0 + + for from_oid, tid, state in rows: + state = self.driver.binary_column_as_state_type(state) + row = (from_oid, tid) + # Check if the row already exists + existing_row = store_batcher.select_from( + 'object_refs_added', + ('zoid',), + zoid=from_oid + ) + + # We monkey-patched this method to add the check below (everything else is the same as the original + # relstorage.adapters.packundo.HistoryFreePackUndo._add_refs_for_oids). + # This check helps avoid an IntegrityError, + # which started occurring after upgrading RelStorage to version 3.5.0. + # Previously, before inserting the zoid, it was first deleted from the object_refs_added table. + if not existing_row: + store_batcher.insert_into( + 'object_refs_added (zoid, tid)', + object_refs_added_schema, + row, + row, + size=2 + ) + + store_batcher.delete_from( + 'object_refs_added', + zoid=from_oid + ) + store_batcher.delete_from( + 'object_ref', + zoid=from_oid + ) + + if state: + try: + to_oids = get_references(state) + except: + log.exception( + "pre_pack: can't unpickle " + "object %d in transaction %d; state length = %d", + from_oid, tid, len(state) + ) + raise + + for to_oid in to_oids: + row = (from_oid, tid, to_oid) + num_refs_found += 1 + store_batcher.insert_into( + 'object_ref (zoid, tid, to_zoid)', + object_ref_schema, + row, + row, + size=3 + ) + + return num_refs_found + + ''' Methods added to support packing systems that have not been packed for a long time and that cause zenossdbpack to crash with an OOM error diff --git a/Products/ZenUtils/requestlogging/ZopeRequestLogger.py b/Products/ZenUtils/requestlogging/ZopeRequestLogger.py old mode 100755 new mode 100644 diff --git a/Products/ZenUtils/snmp.py b/Products/ZenUtils/snmp.py index db7186cb0f..c98e582d4f 100755 --- a/Products/ZenUtils/snmp.py +++ b/Products/ZenUtils/snmp.py @@ -1,20 +1,31 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2010, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## +from __future__ import absolute_import import logging + +from pynetsnmp import usm +from pynetsnmp.twistedsnmp import AgentProxy from twisted.internet import reactor from twisted.internet.defer import Deferred -from pynetsnmp.twistedsnmp import AgentProxy + +authentication_protocols = tuple( + str(p) for p in usm.auth_protocols if p != usm.AUTH_NOAUTH +) +privacy_protocols = tuple( + str(p) for p in usm.priv_protocols if p != usm.PRIV_NOPRIV +) _LOG = logging.getLogger("zen.ZenUtils.snmp") + class SnmpConfig(object): succeeded = None sysName = None @@ -31,9 +42,15 @@ def community(self): def weight(self): return self._weight is None and self.defaultWeight or self._weight - - def __init__(self, ip, weight=None, port=161, timeout=2.5, retries=2, - community='public'): + def __init__( + self, + ip, + weight=None, + port=161, + timeout=2.5, + retries=2, + community="public", + ): self._ip = ip self._weight = weight self._port = port @@ -41,30 +58,32 @@ def __init__(self, ip, weight=None, port=161, timeout=2.5, retries=2, self._retries = retries self._community = community - def __str__(self): return "(%s) %s:%s, SNMP%s, timeout=%ss, retries=%s, community=%s" % ( - self.weight, self._ip, self._port, self.version, self._timeout, - self._retries, self.community) - + self.weight, + self._ip, + self._port, + self.version, + self._timeout, + self._retries, + self.community, + ) def getAgentProxy(self): - return AgentProxy( - ip=self._ip, - port=self._port, + sec = usm.Community(self.community, version=self.version) + return AgentProxy.create( + (self._ip, self._port), + security=sec, timeout=self._timeout, - tries=self._retries, - snmpVersion=self.version, - community=self._community) - + retries=self._retries + ) - def test(self, oid='.1.3.6.1.2.1.1.5.0'): + def test(self, oid=".1.3.6.1.2.1.1.5.0"): _LOG.debug("SnmpConfig.test: oid=%s" % oid) self._proxy = self.getAgentProxy() self._proxy.open() return self._proxy.get([oid]).addBoth(self.enrichResult) - def enrichResult(self, result): self._proxy.close() if isinstance(result, dict) and bool(result): @@ -79,82 +98,95 @@ def enrichResult(self, result): class SnmpV1Config(SnmpConfig): - version = 'v1' + version = "v1" defaultWeight = 10 class SnmpV2cConfig(SnmpConfig): - version = 'v2c' + version = "v2c" defaultWeight = 20 class SnmpV3Config(SnmpConfig): - version = 'v3' + version = "v3" defaultWeight = 30 - def __init__(self, ip, weight=None, port=161, timeout=2.5, retries=2, - community='public', securityName=None, authType=None, - authPassphrase=None, privType=None, privPassphrase=None): + def __init__( + self, + ip, + weight=None, + port=161, + timeout=2.5, + retries=2, + community="public", + securityName=None, + authType=None, + authPassphrase=None, + privType=None, + privPassphrase=None, + engine=None, + context=None + ): super(SnmpV3Config, self).__init__( - ip, weight, port, timeout, retries, community) + ip, weight, port, timeout, retries, community + ) self._securityName = securityName self._authType = authType self._authPassphrase = authPassphrase self._privType = privType self._privPassphrase = privPassphrase - + self._engine = engine + self._context = context def __str__(self): v3string = "securityName=%s" % self._securityName if self._authType: v3string += ", authType=%s, authPassphrase=%s" % ( - self._authType, self._authPassphrase) + self._authType, + self._authPassphrase, + ) if self._privType: v3string += " privType=%s, privPassphrase=%s" % ( - 
self._privType, self._privPassphrase) + self._privType, + self._privPassphrase, + ) return "(%s) %s:%s, SNMP%s, timeout=%ss, retries=%s, %s" % ( - self.weight, self._ip, self._port, self.version, self._timeout, - self._retries, v3string) - + self.weight, + self._ip, + self._port, + self.version, + self._timeout, + self._retries, + v3string, + ) def getAgentProxy(self): - cmdLineArgs = ['-u', self._securityName] - - if self._privType: - cmdLineArgs += [ - '-l', 'authPriv', - '-x', self._privType, - '-X', self._privPassphrase] - elif self._authType: - cmdLineArgs += [ - '-l', 'authNoPriv'] - else: - cmdLineArgs += [ - '-l', 'noAuthNoPriv'] - - if self._authType: - cmdLineArgs += [ - '-a', self._authType, - '-A', self._authPassphrase] - - return AgentProxy( - ip=self._ip, - port=self._port, + sec = usm.User( + self._securityName, + auth=usm.Authentication( + self._authType, self._authPassphrase + ), + priv=usm.Privacy(self._privType, self._privPassphrase), + engine=self._engine, + context=self._context + ) + return AgentProxy.create( + (self._ip, self._port), + security=sec, timeout=self._timeout, - tries=self._retries, - snmpVersion=self.version, - community=self._community, - cmdLineArgs=cmdLineArgs) - + retries=self._retries, + ) def enrichResult(self, result): self._proxy.close() - if isinstance(result, dict) \ - and len(result.keys()) > 0 \ - and not result.keys()[0].startswith('.1.3.6.1.6.3.15.1.1.'): + if ( + isinstance(result, dict) + and len(result.keys()) > 0 + and not result.keys()[0].startswith(".1.3.6.1.6.3.15.1.1.") + ): self.sysName = result.values()[0] self.succeeded = True else: @@ -167,7 +199,7 @@ class SnmpAgentDiscoverer(object): _bestsofar = None def _handleResult(self, result): - if not hasattr(result, 'weight'): + if not hasattr(result, "weight"): # http://dev.zenoss.org/trac/ticket/6268 return @@ -199,7 +231,6 @@ def _handleResult(self, result): if len(self._pending) < 1 and not self._d.called: self._d.callback(self._bestsofar) - def findBestConfig(self, configs): """ Returns the best SnmpConfig in the provided configs list. @@ -214,10 +245,10 @@ def findBestConfig(self, configs): return self._d -if __name__ == '__main__': +if __name__ == "__main__": """ The following snmpd.conf is a good one to run the following tests on. 
- + rocommunity zenosszenoss rouser noauthtest noauth createUser noauthtest MD5 "zenosszenoss" @@ -226,26 +257,34 @@ def findBestConfig(self, configs): rouser privtest createUser privtest SHA "zenosszenoss" DES "zenosszenoss" """ + def printAndExit(result): - print result + print(result) reactor.stop() configs = [ - SnmpV3Config('127.0.0.1', weight=33, securityName='privtest', - authType='SHA', authPassphrase='zenosszenoss', - privType='DES', privPassphrase='zenosszenoss'), - - SnmpV3Config('127.0.0.1', weight=32, securityName='authtest', - authType='SHA', authPassphrase='zenosszenoss'), - - SnmpV3Config('127.0.0.1', weight=31, securityName='noauthtest'), - - SnmpV2cConfig('127.0.0.1', weight=22, community='zenosszenoss'), - SnmpV2cConfig('127.0.0.1', weight=21, community='public'), - - SnmpV1Config('127.0.0.1', weight=12, community='zenosszenoss'), - SnmpV1Config('127.0.0.1', weight=11, community='public'), - ] + SnmpV3Config( + "127.0.0.1", + weight=33, + securityName="privtest", + authType="SHA", + authPassphrase="zenosszenoss", + privType="DES", + privPassphrase="zenosszenoss", + ), + SnmpV3Config( + "127.0.0.1", + weight=32, + securityName="authtest", + authType="SHA", + authPassphrase="zenosszenoss", + ), + SnmpV3Config("127.0.0.1", weight=31, securityName="noauthtest"), + SnmpV2cConfig("127.0.0.1", weight=22, community="zenosszenoss"), + SnmpV2cConfig("127.0.0.1", weight=21, community="public"), + SnmpV1Config("127.0.0.1", weight=12, community="zenosszenoss"), + SnmpV1Config("127.0.0.1", weight=11, community="public"), + ] sad = SnmpAgentDiscoverer() sad.findBestConfig(configs).addBoth(printAndExit) diff --git a/Products/ZenUtils/terminal_size.py b/Products/ZenUtils/terminal_size.py new file mode 100644 index 0000000000..9f7325b006 --- /dev/null +++ b/Products/ZenUtils/terminal_size.py @@ -0,0 +1,125 @@ +"""This is a backport of shutil.get_terminal_size from Python 3.3. + +The original implementation is in C, but here we use the ctypes and +fcntl modules to create a pure Python version of os.get_terminal_size. + +Pulled from https://github.com/chrippa/backports.shutil_get_terminal_size + + +The MIT License (MIT) + +Copyright (c) 2014 Christopher Rosell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + +import os +import struct +import sys + +from collections import namedtuple + +__all__ = ["get_terminal_size"] + + +terminal_size = namedtuple("terminal_size", "columns lines") + +try: + from ctypes import windll, create_string_buffer, WinError + + _handle_ids = { + 0: -10, + 1: -11, + 2: -12, + } + + def _get_terminal_size(fd): + handle = windll.kernel32.GetStdHandle(_handle_ids[fd]) + if handle == 0: + raise OSError('handle cannot be retrieved') + if handle == -1: + raise WinError() + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi) + if res: + res = struct.unpack("hhhhHhhhhhh", csbi.raw) + left, top, right, bottom = res[5:9] + columns = right - left + 1 + lines = bottom - top + 1 + return terminal_size(columns, lines) + else: + raise WinError() + +except ImportError: + import fcntl + import termios + + def _get_terminal_size(fd): + try: + res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4) + except IOError as e: + raise OSError(e) + lines, columns = struct.unpack("hh", res) + + return terminal_size(columns, lines) + + +def get_terminal_size(fallback=(80, 24)): + """Get the size of the terminal window. + + For each of the two dimensions, the environment variable, COLUMNS + and LINES respectively, is checked. If the variable is defined and + the value is a positive integer, it is used. + + When COLUMNS or LINES is not defined, which is the common case, + the terminal connected to sys.__stdout__ is queried + by invoking os.get_terminal_size. + + If the terminal size cannot be successfully queried, either because + the system doesn't support querying, or because we are not + connected to a terminal, the value given in fallback parameter + is used. Fallback defaults to (80, 24) which is the default + size used by many terminal emulators. + + The value returned is a named tuple of type os.terminal_size. + """ + # Try the environment first + try: + columns = int(os.environ["COLUMNS"]) + except (KeyError, ValueError): + columns = 0 + + try: + lines = int(os.environ["LINES"]) + except (KeyError, ValueError): + lines = 0 + + # Only query if necessary + if columns <= 0 or lines <= 0: + try: + size = _get_terminal_size(sys.__stdout__.fileno()) + except (NameError, OSError): + size = terminal_size(*fallback) + + if columns <= 0: + columns = size.columns + if lines <= 0: + lines = size.lines + + return terminal_size(columns, lines) diff --git a/Products/ZenUtils/tests/testDaemonStats.py b/Products/ZenUtils/tests/testDaemonStats.py index 2eb51946ef..cdef97f457 100644 --- a/Products/ZenUtils/tests/testDaemonStats.py +++ b/Products/ZenUtils/tests/testDaemonStats.py @@ -1,53 +1,66 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2014, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## -import unittest, os +import os +import unittest + +from mock import patch from Products.ZenUtils.DaemonStats import DaemonStats from Products.ZenTestCase.BaseTestCase import BaseTestCase + class DaemonStatsTest(BaseTestCase): """Test the DaemonStats""" - def setUp(self): - self.daemon_stats = DaemonStats() + @patch("Products.ZenUtils.DaemonStats.cc_config", autospec=True) + def testDaemonsTagsServiceId(self, _cc): + _cc.service_id = "ID" + _cc.tenant_id = "foo" + _cc.instance_id = "bar" + daemon_stats = DaemonStats() - def testDaemonsTagsServiceId(self): - os.environ["CONTROLPLANE"] = "1" - os.environ["CONTROLPLANE_SERVICE_ID"] = "ID" - os.environ["CONTROLPLANE_TENANT_ID"] = "foo" - os.environ["CONTROLPLANE_INSTANCE_ID"] = "bar" - self.daemon_stats.config( "name", "monitor", None, None, None) + daemon_stats.config("name", "monitor", None, None, None) self.assertEqual( - {'daemon': 'name', 'instance': 'bar', 'internal': True, - 'monitor': 'monitor', 'metricType': 'type', 'serviceId': 'ID', - 'tenantId': 'foo'}, - self.daemon_stats._tags("type") + { + "daemon": "name", + "instance": "bar", + "internal": True, + "monitor": "monitor", + "metricType": "type", + "serviceId": "ID", + "tenantId": "foo", + }, + daemon_stats._tags("type"), ) - def testDaemonsDoesNotTagServiceId(self): - if "CONTROLPLANE" in os.environ: - del os.environ["CONTROLPLANE"] - - if "CONTROLPLANE_SERVICE_ID" in os.environ: - del os.environ["CONTROLPLANE_SERVICE_ID"] + @patch("Products.ZenUtils.DaemonStats.cc_config", autospec=True) + def testDaemonsDoesNotTagServiceId(self, _cc): + _cc.is_serviced = False + _cc.service_id = "" + daemon_stats = DaemonStats() - self.daemon_stats.config( "name", "monitor", None, None, None) + daemon_stats.config("name", "monitor", None, None, None) self.assertEqual( - {'daemon': 'name', 'internal': True, 'monitor': 'monitor', 'metricType': 'type'}, - self.daemon_stats._tags("type") + { + "daemon": "name", + "internal": True, + "monitor": "monitor", + "metricType": "type", + }, + daemon_stats._tags("type"), ) + def test_suite(): - return unittest.TestSuite(( - unittest.makeSuite(DaemonStatsTest), - )) + return unittest.TestSuite((unittest.makeSuite(DaemonStatsTest),)) + -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/Products/ZenUtils/tests/test_MySqlZodbFactory.py b/Products/ZenUtils/tests/test_MySqlZodbFactory.py new file mode 100644 index 0000000000..b594f2d3bd --- /dev/null +++ b/Products/ZenUtils/tests/test_MySqlZodbFactory.py @@ -0,0 +1,83 @@ +############################################################################## +# +# Copyright (C) Zenoss, Inc. 2024, all rights reserved. +# +# This content is made available according to terms specified in +# License.zenoss under the directory where your Zenoss product is installed. 
+# +############################################################################## + +from __future__ import absolute_import, print_function + +import logging + +from unittest import TestCase + +from mock import call, Mock, patch + +from Products.ZenUtils.MySqlZodbFactory import ( + MySQLdb, + _get_storage, + _OPERATIONAL_ERROR_RETRY_DELAY, +) + +PATH = {"src": "Products.ZenUtils.MySqlZodbFactory"} + + +class TestGetStorage(TestCase): + """Test the _get_storage function.""" + + def setUp(t): + log = logging.getLogger() + log.setLevel(logging.FATAL + 1) + + def tearDown(t): + log = logging.getLogger() + log.setLevel(logging.NOTSET) + + @patch("{src}.relstorage.storage.RelStorage".format(**PATH), autospec=True) + def test_nominal(t, relstorage_): + params = {"a": 1} + adapter = Mock() + + storage = _get_storage(adapter, params) + + t.assertEqual(storage, relstorage_.return_value) + relstorage_.assert_called_with(adapter, a=1) + + @patch("{src}.time".format(**PATH), autospec=True) + @patch("{src}.relstorage.storage.RelStorage".format(**PATH), autospec=True) + def test_operational_error(t, relstorage_, time_): + params = {"a": 1} + adapter = Mock() + + ex = MySQLdb.OperationalError() + relstorage_.side_effect = ex + + sleep_calls = ( + call(_OPERATIONAL_ERROR_RETRY_DELAY), + call(_OPERATIONAL_ERROR_RETRY_DELAY), + call(_OPERATIONAL_ERROR_RETRY_DELAY), + ) + + storage = _get_storage(adapter, params) + + t.assertIsNone(storage) + time_.sleep.assert_has_calls(sleep_calls) + t.assertEqual(len(sleep_calls), relstorage_.call_count) + t.assertEqual(len(sleep_calls), time_.sleep.call_count) + + @patch("{src}.time".format(**PATH), autospec=True) + @patch("{src}.relstorage.storage.RelStorage".format(**PATH), autospec=True) + def test_unexpected_error(t, relstorage_, time_): + params = {"a": 1} + adapter = Mock() + + ex = Exception() + relstorage_.side_effect = ex + + storage = _get_storage(adapter, params) + + t.assertIsNone(storage) + t.assertEqual(1, relstorage_.call_count) + t.assertEqual(0, time_.call_count) diff --git a/Products/ZenUtils/zenpack.py b/Products/ZenUtils/zenpack.py index 0abecf6c56..d6b31227d1 100644 --- a/Products/ZenUtils/zenpack.py +++ b/Products/ZenUtils/zenpack.py @@ -278,7 +278,7 @@ def path(self, *parts): # by ZPLSkins loader. skinsSubdir = zenPath('Products', packName, 'skins', packName) if not os.path.exists(skinsSubdir): - os.makedirs(skinsSubdir, 0750) + os.makedirs(skinsSubdir, 0o750) self.install(packName) elif self.options.fetch: @@ -761,11 +761,11 @@ def extract(self, fname): if name.endswith('~'): continue if name.endswith('/'): if not os.path.exists(fullname): - os.makedirs(fullname, 0750) + os.makedirs(fullname, 0o750) else: base = os.path.dirname(fullname) if not os.path.isdir(base): - os.makedirs(base, 0750) + os.makedirs(base, 0o750) file(fullname, 'wb').write(zf.read(name)) return packName diff --git a/Products/ZenWidgets/PersistentMessage.py b/Products/ZenWidgets/PersistentMessage.py index 073f4f5d76..8ace7de7b8 100644 --- a/Products/ZenWidgets/PersistentMessage.py +++ b/Products/ZenWidgets/PersistentMessage.py @@ -1,35 +1,45 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - -__doc__ = """ +""" This is in a separate module to prevent recursive import. """ + import cgi import time -from zope.interface import implements + +from zope.interface import implementer from Products.ZenModel.ZenModelRM import ZenModelRM -from Products.ZenRelations.RelSchema import * -from Products.ZenWidgets.interfaces import IMessage -from Products.ZenWidgets.messaging import INFO +from Products.ZenRelations.RelSchema import ToManyCont, ToOne +from .interfaces import IMessage +from .messaging import INFO + + +@implementer(IMessage) class PersistentMessage(ZenModelRM): + """A single message. + + Messages are stored as relations on UserSettings and in the session object. """ - A single message. Messages are stored as relations on UserSettings and in - the session object. - """ - implements(IMessage) - _relations = (("messageQueue", ToOne( - ToManyCont, "Products.ZenModel.UserSettings.UserSettings", "messages") - ),) + _relations = ( + ( + "messageQueue", + ToOne( + ToManyCont, + "Products.ZenModel.UserSettings.UserSettings", + "messages", + ), + ), + ) title = None body = None @@ -38,8 +48,7 @@ class PersistentMessage(ZenModelRM): _read = False def __init__(self, id, title, body, priority=INFO, image=None): - """ - Initialization method. + """Initialize an PersistentMessage instance. @param title: The message title @type title: str @@ -58,13 +67,9 @@ def __init__(self, id, title, body, priority=INFO, image=None): self.timestamp = time.time() def mark_as_read(self): - """ - Mark this message as read. - """ + """Mark this message as read.""" self._read = True def delete(self): - """ - Delete this message from the system. - """ + """Delete this message from the system.""" self.__primary_parent__._delObject(self.id) diff --git a/Products/ZenWidgets/Portlet.py b/Products/ZenWidgets/Portlet.py index 49f1402586..6fd6af4ec7 100644 --- a/Products/ZenWidgets/Portlet.py +++ b/Products/ZenWidgets/Portlet.py @@ -1,30 +1,30 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - import logging -log = logging.getLogger('zen.Portlet') +from os.path import basename, exists from string import Template +from AccessControl.class_init import InitializeClass + +from Products.ZenModel.ZenModelRM import ZenModelRM from Products.ZenModel.ZenossSecurity import ZEN_COMMON -from os.path import basename, exists from Products.ZenRelations.RelSchema import ToManyCont, ToOne -from Products.ZenModel.ZenModelRM import ZenModelRM -from AccessControl.class_init import InitializeClass from Products.ZenUtils.Utils import zenPath +log = logging.getLogger("zen.Portlet") + + def manage_addPortlet(self, context, REQUEST=None): - """ - Add a portlet. - """ - pass + """Add a portlet.""" + class Portlet(ZenModelRM): """ @@ -34,29 +34,42 @@ class Portlet(ZenModelRM): Portlets should not be instantiated directly. They should only be created by a PortletManager object. 
""" - source = '' + + source = "" height = 200 - portal_type = meta_type = 'Portlet' + portal_type = meta_type = "Portlet" _relations = ( - ("portletManager", ToOne( - ToManyCont, "Products.ZenWidgets.PortletManager", "portlets")), + ( + "portletManager", + ToOne( + ToManyCont, "Products.ZenWidgets.PortletManager", "portlets" + ), + ), ) _properties = ( - {'id':'title','type':'string','mode':'w'}, - {'id':'description', 'type':'string', 'mode':'w'}, - {'id':'permission', 'type':'string', 'mode':'w'}, - {'id':'sourcepath', 'type':'string', 'mode':'w'}, - {'id':'preview', 'type':'string', 'mode':'w'}, - {'id':'height', 'type':'int', 'mode':'w'}, + {"id": "title", "type": "string", "mode": "w"}, + {"id": "description", "type": "string", "mode": "w"}, + {"id": "permission", "type": "string", "mode": "w"}, + {"id": "sourcepath", "type": "string", "mode": "w"}, + {"id": "preview", "type": "string", "mode": "w"}, + {"id": "height", "type": "int", "mode": "w"}, ) - - def __init__(self, sourcepath, id='', title='', description='', - preview='', height=200, permission=ZEN_COMMON): - if not id: id = basename(sourcepath).split('.')[0] + def __init__( + self, + sourcepath, + id="", + title="", + description="", + preview="", + height=200, + permission=ZEN_COMMON, + ): + if not id: + id = basename(sourcepath).split(".")[0] self.id = id ZenModelRM.__init__(self, id) self.title = title @@ -75,30 +88,38 @@ def check(self): def _read_source(self): try: - path = self.sourcepath if exists(self.sourcepath) else self._getSourcePath() + path = ( + self.sourcepath + if exists(self.sourcepath) + else self._getSourcePath() + ) f = file(path) except IOError as ex: log.error("Unable to load portlet from '%s': %s", path, ex) return else: - tvars = {'portletId': self.id, - 'portletTitle': self.title, - 'portletHeight': self.height} + tvars = { + "portletId": self.id, + "portletTitle": self.title, + "portletHeight": self.height, + } self.source = Template(f.read()).safe_substitute(tvars) f.close() - def getPrimaryPath(self,fromNode=None): - """ - Override the default, which doesn't account for things on zport - """ - return ('', 'zport') + super(Portlet, self).getPrimaryPath(fromNode) + def getPrimaryPath(self, fromNode=None): + """Override the default, which doesn't account for things on zport.""" + return ("", "zport") + super(Portlet, self).getPrimaryPath(fromNode) def get_source(self, debug_mode=False): - if debug_mode: self._read_source() + if debug_mode: + self._read_source() src = [] src.append(self.source) - src.append("YAHOO.zenoss.portlet.register_portlet('%s', '%s');" % ( - self.id, self.title)) - return '\n'.join(src) + src.append( + "YAHOO.zenoss.portlet.register_portlet('%s', '%s');" + % (self.id, self.title) + ) + return "\n".join(src) + InitializeClass(Portlet) diff --git a/Products/ZenWidgets/PortletManager.py b/Products/ZenWidgets/PortletManager.py index 042d504d92..bdd8b61b7e 100644 --- a/Products/ZenWidgets/PortletManager.py +++ b/Products/ZenWidgets/PortletManager.py @@ -1,37 +1,41 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - -import os import md5 +import os from Globals import DevelopmentMode from AccessControl import getSecurityManager from AccessControl.class_init import InitializeClass -from Products.ZenRelations.RelSchema import ToManyCont, ToOne -from Products.ZenModel.ZenModelRM import ZenModelRM + from Products.ZenMessaging.audit import audit +from Products.ZenModel.ZenModelRM import ZenModelRM from Products.ZenModel.ZenossSecurity import ZEN_COMMON -from Products.ZenWidgets import messaging +from Products.ZenRelations.RelSchema import ToManyCont, ToOne from Products.Zuul.utils import ZuulMessageFactory as _t -from Portlet import Portlet +from . import messaging +from .Portlet import Portlet + + +def getuid(): + return md5.md5(os.urandom(10)).hexdigest()[:8] -getuid = lambda:md5.md5(os.urandom(10)).hexdigest()[:8] -class DuplicatePortletRegistration(Exception): pass +class DuplicatePortletRegistration(Exception): + pass + def manage_addPortletManager(context, id="", REQUEST=None): - """ - Create a portlet manager under context. - """ - if not id: id = "ZenPortletManager" + """Create a portlet manager under context.""" + if not id: + id = "ZenPortletManager" zpm = PortletManager(id) context._setObject(id, zpm) zpm = context._getOb(id) @@ -39,41 +43,72 @@ def manage_addPortletManager(context, id="", REQUEST=None): class PortletManager(ZenModelRM): - """ - A registry for portlet source and metadata. Provides access functions and - handles portlet permissions. + """A registry for portlet source and metadata. + + Provides access functions and handles portlet permissions. """ - portal_type = meta_type = 'PortletManager' + portal_type = meta_type = "PortletManager" _relations = ( - ("portlets", ToManyCont(ToOne, "Products.ZenWidgets.Portlet", - "portletManager")), + ( + "portlets", + ToManyCont(ToOne, "Products.ZenWidgets.Portlet", "portletManager"), + ), ) - - def register_extjsPortlet(self, id, title, height=200, permission=ZEN_COMMON): - """ - Registers an ExtJS portlet - """ - ppath = os.path.join('Products','ZenWidgets','ZenossPortlets','ExtPortlet.js') - self.register_portlet(ppath, id=id, title=_t(title), height=height, - permission=permission) - def register_portlet(self, sourcepath, id='', title='', description='', - preview='', height=200, permission=ZEN_COMMON): + def register_extjsPortlet( + self, id, title, height=200, permission=ZEN_COMMON + ): + """Registers an ExtJS portlet.""" + ppath = os.path.join( + "Products", "ZenWidgets", "ZenossPortlets", "ExtPortlet.js" + ) + self.register_portlet( + ppath, id=id, title=_t(title), height=height, permission=permission + ) + + def register_portlet( + self, + sourcepath, + id="", + title="", + description="", + preview="", + height=200, + permission=ZEN_COMMON, + ): """ Registers a new source file and creates an associated Portlet to store the metadata and provide access methods. 
""" p = self.find(id, sourcepath) if p: - old_values = (p.sourcepath, p.id, p.title, p.description, p.preview, p.height, p.permission) - new_values = (sourcepath, id, _t(title), description, preview, height, permission) + old_values = ( + p.sourcepath, + p.id, + p.title, + p.description, + p.preview, + p.height, + p.permission, + ) + new_values = ( + sourcepath, + id, + _t(title), + description, + preview, + height, + permission, + ) if old_values == new_values: # Portlet unchanged - don't re-register return self.unregister_portlet(p.id) - p = Portlet(sourcepath, id, _t(title), description, preview, height, permission) + p = Portlet( + sourcepath, id, _t(title), description, preview, height, permission + ) self.portlets._setObject(id, p) def unregister_portlet(self, id): @@ -87,22 +122,24 @@ def get_portlets(self): user = getSecurityManager().getUser() dmd = self.dmd.primaryAq() return filter( - lambda x:user.has_permission(x.permission, dmd) and x.check(), - self.portlets()) + lambda x: user.has_permission(x.permission, dmd) and x.check(), + self.portlets(), + ) - def find(self, id='', sourcepath=''): - """ - Look for a registered portlet with an id or source path. - """ + def find(self, id="", sourcepath=""): + """Look for a registered portlet with an id or source path.""" for portlet in self.portlets(): - # special case for ExtJs portlets which will all have the same sourcepath - if portlet.id==id or (portlet.sourcepath==sourcepath and not 'ExtPortlet' in sourcepath): return portlet + # Special case for ExtJs portlets which will all have the same + # sourcepath. + if portlet.id == id or ( + portlet.sourcepath == sourcepath + and "ExtPortlet" not in sourcepath + ): + return portlet return None def update_source(self, REQUEST=None): - """ - Reread the source files for all portlets. - """ + """Reread the source files for all portlets.""" for portlet in self.portlets(): portlet._read_source() @@ -112,27 +149,28 @@ def get_source(self, REQUEST=None): javascript file. 
""" srcs = [x.get_source(DevelopmentMode) for x in self.get_portlets()] - srcs.append('YAHOO.register("portletsource", YAHOO.zenoss.portlet, {})') + srcs.append( + 'YAHOO.register("portletsource", YAHOO.zenoss.portlet, {})' + ) if REQUEST: - REQUEST.response.headers['Content-Type'] = 'text/javascript' - return '\n'.join(srcs) + REQUEST.response.headers["Content-Type"] = "text/javascript" + return "\n".join(srcs) def edit_portlet_perms(self, REQUEST=None): - """ - Update the portlet permissions - """ + """Update the portlet permissions.""" for portlet in REQUEST.form: - if not portlet.endswith('_permission'): continue - portname = portlet.split('_')[0] + if not portlet.endswith("_permission"): + continue + portname = portlet.split("_")[0] p = self.find(id=portname) p.permission = REQUEST.form[portlet] - if REQUEST: + if REQUEST: messaging.IMessageSender(self).sendToBrowser( - 'Permissions Saved', - "Saved At: %s" % self.getCurrentUserNowString() + "Permissions Saved", + "Saved At: %s" % self.getCurrentUserNowString(), ) - REQUEST['RESPONSE'].redirect('/zport/dmd/editPortletPerms') - audit('UI.Portlet.Edit', data_=REQUEST.form) + REQUEST["RESPONSE"].redirect("/zport/dmd/editPortletPerms") + audit("UI.Portlet.Edit", data_=REQUEST.form) InitializeClass(PortletManager) diff --git a/Products/ZenWidgets/ZenTableManager.py b/Products/ZenWidgets/ZenTableManager.py index c0352603d7..93ac63b121 100644 --- a/Products/ZenWidgets/ZenTableManager.py +++ b/Products/ZenWidgets/ZenTableManager.py @@ -7,74 +7,75 @@ # ############################################################################## - """ZenTableManager -ZenTableManager is a Zope Product that helps manage and display -large sets of tabular data. It allows for column sorting, -break down of the set into pages, and filtering of elements -in the table. It also allows users to store their own default -page size (but publishes a hook to get this values from -a different location). +ZenTableManager is a Zope Product that helps manage and display large sets of +tabular data. It allows for column sorting, break down of the set into pages, +and filtering of elements in the table. It also allows users to store their +own default page size (but publishes a hook to get this values from a +different location). 
""" import logging import math import re -import ZTUtils import urllib -from AccessControl.class_init import InitializeClass + +import ZTUtils + from Acquisition import aq_base -from OFS.SimpleItem import SimpleItem -from OFS.PropertyManager import PropertyManager +from AccessControl.class_init import InitializeClass from DocumentTemplate.sequence.SortEx import sort +from OFS.PropertyManager import PropertyManager +from OFS.SimpleItem import SimpleItem from persistent.dict import PersistentDict -from ZenTableState import ZenTableState from Products.ZenUtils.Utils import getTranslation -log = logging.getLogger('zen.ZenTableManager') +from .ZenTableState import ZenTableState + +log = logging.getLogger("zen.ZenTableManager") + -class TableStateNotFound(Exception): pass +class TableStateNotFound(Exception): + pass -def convert(x): - return 0.0 if isinstance(x, float) and math.isnan(x) else x +def convert(x): + return 0.0 if isinstance(x, float) and math.isnan(x) else x -def zencmp(o1, o2): - return cmp(convert(o1), convert(o2)) +def zencmp(o1, o2): + return cmp(convert(o1), convert(o2)) -def manage_addZenTableManager(context, id="", REQUEST = None): - """make a CVDeviceLoader""" - if not id: id = "ZenTableManager" + +def manage_addZenTableManager(context, id="", REQUEST=None): + """Make a CVDeviceLoader.""" + if not id: + id = "ZenTableManager" ztm = ZenTableManager(id) context._setObject(id, ztm) ztm = context._getOb(id) ztm.initTableManagerSkins() if REQUEST is not None: - REQUEST.RESPONSE.redirect(context.absolute_url_path() - +'/manage_main') + REQUEST.RESPONSE.redirect(context.absolute_url_path() + "/manage_main") + class ZenTableManager(SimpleItem, PropertyManager): - """ZenTableManager manages display of tabular data""" + """ZenTableManager manages display of tabular data.""" - portal_type = meta_type = 'ZenTableManager' + portal_type = meta_type = "ZenTableManager" _properties = ( - {'id':'defaultBatchSize', 'type':'int','mode':'w'}, - {'id':'abbrStartLabel', 'type':'int','mode':'w'}, - {'id':'abbrEndLabel', 'type':'int','mode':'w'}, - {'id':'abbrPadding', 'type':'int','mode':'w'}, - {'id':'abbrSeparator', 'type':'string','mode':'w'}, + {"id": "defaultBatchSize", "type": "int", "mode": "w"}, + {"id": "abbrStartLabel", "type": "int", "mode": "w"}, + {"id": "abbrEndLabel", "type": "int", "mode": "w"}, + {"id": "abbrPadding", "type": "int", "mode": "w"}, + {"id": "abbrSeparator", "type": "string", "mode": "w"}, ) - manage_options = ( - PropertyManager.manage_options + - SimpleItem.manage_options - ) - + manage_options = PropertyManager.manage_options + SimpleItem.manage_options def __init__(self, id): self.id = id @@ -83,9 +84,9 @@ def __init__(self, id): self.abbrEndLabel = 5 self.abbrPadding = 5 self.abbrSeparator = ".." 
- self.abbrThresh = self.abbrStartLabel + \ - self.abbrEndLabel + self.abbrPadding - + self.abbrThresh = ( + self.abbrStartLabel + self.abbrEndLabel + self.abbrPadding + ) def getDefaultBatchSize(self): dbs = self.defaultBatchSize @@ -94,80 +95,80 @@ def getDefaultBatchSize(self): dbs = zu.getUserSettings().defaultPageSize return dbs - def setupTableState(self, tableName, **keys): - """initialize or setup the session variable to track table state""" + """Initialize or setup the session variable to track table state.""" tableState = self.getTableState(tableName, **keys) request = self.REQUEST tableState.updateFromRequest(request) return tableState - def getTableState(self, tableName, attrname=None, default=None, **keys): - """return an existing table state or a single value from the state""" + """Return an existing table state or a single value from the state.""" from Products.ZenUtils.Utils import unused + unused(default) request = self.REQUEST tableStates = self.getTableStates() tableState = tableStates.get(tableName, None) if not tableState: dbs = self.getDefaultBatchSize() - tableStates[tableName] = ZenTableState(request,tableName,dbs,**keys) + tableStates[tableName] = ZenTableState( + request, tableName, dbs, **keys + ) tableState = tableStates[tableName] - if attrname == None: + if attrname is None: return tableStates[tableName] return getattr(tableState, attrname, None) - def getReqTableState(self, tableName, attrname): """ Return attrname from request if present if not return from tableState. """ request = self.REQUEST - if request.has_key(attrname): + if request.has_key(attrname): # noqa W601 return request[attrname] return self.getTableState(tableName, attrname) - def setTableState(self, tableName, attrname, value): """Set the value of a table state attribute and return it.""" tableState = self.getTableState(tableName) return tableState.setTableState(attrname, value) - def setReqTableState(self, tableName, attrname, default=None, reset=False): - """set the a value in the table state from the request""" + """Set the a value in the table state from the request.""" tableState = self.getTableState(tableName) value = self.REQUEST.get(attrname, None) tableState = self.getTableState(tableName) - return tableState.setTableState(attrname, value, - default=default, reset=reset) - + return tableState.setTableState( + attrname, value, default=default, reset=reset + ) def deleteTableState(self, tableName): - """delete an existing table state""" + """Delete an existing table state.""" tableStates = self.getTableStates() if tableName in tableStates: del tableStates[tableName] - def getBatch(self, tableName, objects, **keys): - """Filter, sort and batch objects and pass return set. - """ + """Filter, sort and batch objects and pass return set.""" if log.isEnabledFor(logging.DEBUG): import os - fmt = 'getBatch pid=%s, tableName=%s, %s objects' + + fmt = "getBatch pid=%s, tableName=%s, %s objects" pid = os.getpid() log.debug(fmt, pid, tableName, len(objects)) if not objects: objects = [] tableState = self.setupTableState(tableName, **keys) if tableState.onlyMonitored and objects: - objects = [o for o in objects if getattr(o, 'isMonitored', o.monitored)()] + objects = [ + o for o in objects if getattr(o, "isMonitored", o.monitored)() + ] if tableState.filter and objects: - objects = self.filterObjects(objects, tableState.filter, - tableState.filterFields) + objects = self.filterObjects( + objects, tableState.filter, tableState.filterFields + ) # objects is frequently a generator. 
Need a list in order to sort if not isinstance(objects, list): objects = list(objects) @@ -175,53 +176,57 @@ def getBatch(self, tableName, objects, **keys): objects = self.sortObjects(objects, tableState) tableState.totalobjs = len(objects) tableState.buildPageNavigation(objects) - if not hasattr(self.REQUEST, 'doExport'): - objects = ZTUtils.Batch(objects, - tableState.batchSize or len(objects), - start=tableState.start, orphan=0) + if not hasattr(self.REQUEST, "doExport"): + objects = ZTUtils.Batch( + objects, + tableState.batchSize or len(objects), + start=tableState.start, + orphan=0, + ) return objects - def getBatchForm(self, objects, request): - """Create batch based on objects no sorting for filter applied. - """ - batchSize = request.get('batchSize',self.defaultBatchSize) - if batchSize in ['', '0']: + """Create batch based on objects no sorting for filter applied.""" + batchSize = request.get("batchSize", self.defaultBatchSize) + if batchSize in ["", "0"]: batchSize = 0 else: batchSize = int(batchSize) - start = int(request.get('start',0)) - resetStart = int(request.get('resetStart',0)) - lastindex = request.get('lastindex',0) - navbutton = request.get('navbutton',None) + start = int(request.get("start", 0)) + resetStart = int(request.get("resetStart", 0)) + lastindex = request.get("lastindex", 0) + navbutton = request.get("navbutton", None) if navbutton == "first" or resetStart: start = 0 elif navbutton == "last": - start=lastindex + start = lastindex elif navbutton == "next": start = start + batchSize - if start > lastindex: start = lastindex + if start > lastindex: + start = lastindex elif navbutton == "prev": start = start - batchSize - elif request.has_key("nextstart"): + elif request.has_key("nextstart"): # noqa W601 start = request.nextstart - if 0 < start > len(objects): start = 0 + if 0 < start > len(objects): + start = 0 request.start = start - objects = ZTUtils.Batch(objects, batchSize or len(objects), - start=request.start, orphan=0) + objects = ZTUtils.Batch( + objects, batchSize or len(objects), start=request.start, orphan=0 + ) return objects - def filterObjects(self, objects, regex, filterFields): - """filter objects base on a regex in regex and list of fields + """Filter objects base on a regex in regex and list of fields in filterFields.""" - if self.REQUEST.SESSION.has_key('message'): - self.REQUEST.SESSION.delete('message') + if self.REQUEST.SESSION.has_key("message"): # noqa W601 + self.REQUEST.SESSION.delete("message") if not regex: return objects - try: search = re.compile(regex,re.I).search + try: + search = re.compile(regex, re.I).search except re.error: - self.REQUEST.SESSION['message'] = "Invalid regular expression." + self.REQUEST.SESSION["message"] = "Invalid regular expression." 
return objects filteredObjects = [] for obj in objects: @@ -237,61 +242,78 @@ def filterObjects(self, objects, regex, filterFields): value = str(value) target.append(value) targetstring = " ".join(target) - if search(targetstring): filteredObjects.append(obj) + if search(targetstring): + filteredObjects.append(obj) return filteredObjects - def evaluateTales(self, expression, dev): log.warning("evaluating %s", dev.__dict__) - variables_and_funcs = { - 'device':dev, 'dev':dev - } - expression = expression.replace('python:', 'attr=') + variables_and_funcs = {"device": dev, "dev": dev} + expression = expression.replace("python:", "attr=") try: - exec(expression, variables_and_funcs) - attr = variables_and_funcs['attr'] + exec (expression, variables_and_funcs) + attr = variables_and_funcs["attr"] log.warning("attr is %s", attr) except Exception as ex: attr = str(ex) return attr def sortObjects(self, objects, request): - """Sort objects. - """ + """Sort objects.""" + def dictAwareSort(objects, field, rule, sence): if not objects: return objects + class Wrapper: def __init__(self, field, cargo): - if callable(field): field = field() - #make sorting case-insensitive - if isinstance(field, basestring): field = field.lower() + if callable(field): + field = field() + # make sorting case-insensitive + if isinstance(field, basestring): + field = field.lower() self.field = field self.cargo = cargo + if field.startswith("python:"): - objects = [Wrapper(self.evaluateTales(field, o), o) for o in objects] + objects = [ + Wrapper(self.evaluateTales(field, o), o) for o in objects + ] else: if isinstance(objects[0], dict): - objects = [Wrapper(o.get(field, ''), o) for o in objects] + objects = [Wrapper(o.get(field, ""), o) for o in objects] else: - objects = [Wrapper(getattr(o, field, ''), o) for o in objects] - objects = sort(objects, (('field', rule, sence),), {'zencmp': zencmp}) + objects = [ + Wrapper(getattr(o, field, ""), o) for o in objects + ] + objects = sort( + objects, (("field", rule, sence),), {"zencmp": zencmp} + ) return [w.cargo for w in objects] - if (getattr(aq_base(request), 'sortedHeader', False) - and getattr(aq_base(request),"sortedSence", False)): + if getattr(aq_base(request), "sortedHeader", False) and getattr( + aq_base(request), "sortedSence", False + ): sortedHeader = request.sortedHeader sortedSence = request.sortedSence sortRule = getattr(aq_base(request), "sortRule", "cmp") - objects = dictAwareSort(objects, sortedHeader, sortRule, sortedSence) + objects = dictAwareSort( + objects, sortedHeader, sortRule, sortedSence + ) return objects - - def getTableHeader(self, tableName, fieldName, fieldTitle, - sortRule='cmp', style='tableheader',attributes="", - i18n_domain='zenoss'): - """generate a tag that allows column sorting""" + def getTableHeader( + self, + tableName, + fieldName, + fieldTitle, + sortRule="cmp", + style="tableheader", + attributes="", + i18n_domain="zenoss", + ): + """Generate a tag that allows column sorting.""" href = self.getTableHeaderHref(tableName, fieldName, sortRule) style = self.getTableHeaderStyle(tableName, fieldName, style) tag = """""" % (style, attributes) @@ -299,58 +321,60 @@ def getTableHeader(self, tableName, fieldName, fieldTitle, # Owwwwwwwwwww from Products.Zuul.utils import ZuulMessageFactory as _t + msg = getTranslation(_t(fieldTitle), self.REQUEST, domain=i18n_domain) tag += msg + "\n" return tag - - def getTableHeaderHref(self, tableName, fieldName, - sortRule='cmp',params=""): - """build the href attribute for the table headers""" - + def 
getTableHeaderHref( + self, tableName, fieldName, sortRule="cmp", params="" + ): + """Build the href attribute for the table headers.""" tableState = self.getTableState(tableName) sortedHeader = tableState.sortedHeader sortedSence = tableState.sortedSence if sortedHeader == fieldName: - if sortedSence == 'asc': - sortedSence = 'desc' - elif sortedSence == 'desc': - sortedSence = 'asc' + if sortedSence == "asc": + sortedSence = "desc" + elif sortedSence == "desc": + sortedSence = "asc" else: - sortedSence = 'asc' + sortedSence = "asc" href = "%s?tableName=%s&sortedHeader=%s&" % ( - self.REQUEST.URL, tableName, urllib.quote_plus(fieldName)) - href += "sortedSence=%s&sortRule=%s%s\">" % ( - sortedSence, sortRule, params) + self.REQUEST.URL, + tableName, + urllib.quote_plus(fieldName), + ) + href += 'sortedSence=%s&sortRule=%s%s">' % ( + sortedSence, + sortRule, + params, + ) tableState.addFilterField(fieldName) return href - def getTableHeaderStyle(self, tableName, fieldName, style): """apends "selected" onto the CSS style if this field is selected""" if self.getTableState(tableName, "sortedHeader") == fieldName: style = style + "selected" return style - def getTableStates(self): session = self.REQUEST.SESSION try: - return session['zentablestates'] + return session["zentablestates"] except KeyError: init = PersistentDict() - session['zentablestates'] = init + session["zentablestates"] = init return init - def tableStatesHasTable(self, tableName): - return self.getTableStates().has_key(tableName) - + return tableName in self.getTableStates() def getNavData(self, objects, batchSize, sortedHeader): pagenav = [] - if batchSize in ['', '0']: + if batchSize in ["", "0"]: batchSize = 0 else: batchSize = int(batchSize) @@ -358,55 +382,59 @@ def getNavData(self, objects, batchSize, sortedHeader): if sortedHeader: label = self._buildTextLabel(objects[index], sortedHeader) elif batchSize: - label = str(1+index/batchSize) + label = str(1 + index / batchSize) else: - label = '1' - pagenav.append({ 'label': label, 'index': index }) + label = "1" + pagenav.append({"label": label, "index": index}) return pagenav - def _buildTextLabel(self, item, sortedHeader): endAbbr = "" attr = getattr(item, sortedHeader, "") - if callable(attr): attr = attr() + if callable(attr): + attr = attr() label = str(attr) if len(label) > self.abbrThresh: - startAbbr = label[:self.abbrStartLabel] + startAbbr = label[: self.abbrStartLabel] if self.abbrEndLabel > 0: - endAbbr = label[-self.abbrEndLabel:] + endAbbr = label[-self.abbrEndLabel :] label = "".join((startAbbr, self.abbrSeparator, endAbbr)) return label - def initTableManagerSkins(self): """setup the skins that come with ZenTableManager""" - layers = ('zentablemanager','zenui') + layers = ("zentablemanager", "zenui") try: import string from Products.CMFCore.utils import getToolByName from Products.CMFCore.DirectoryView import addDirectoryViews - skinstool = getToolByName(self, 'portal_skins') + + skinstool = getToolByName(self, "portal_skins") for layer in layers: if layer not in skinstool.objectIds(): - addDirectoryViews(skinstool, 'skins', globals()) + addDirectoryViews(skinstool, "skins", globals()) skins = skinstool.getSkinSelections() for skin in skins: path = skinstool.getSkinPath(skin) - path = map(string.strip, string.split(path,',')) + path = map(string.strip, string.split(path, ",")) for layer in layers: if layer not in path: try: - path.insert(path.index('custom')+1, layer) + path.insert(path.index("custom") + 1, layer) except ValueError: path.append(layer) - 
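sortObjects() above wraps every row so that dictionaries, plain objects, callables, and mixed-case strings can all be ordered by one field (a decorate-sort-undecorate pass). A stripped-down sketch of the same wrapper idea on hypothetical data (the original checks basestring rather than str):

    class _Key(object):
        def __init__(self, field, cargo):
            if callable(field):
                field = field()
            if isinstance(field, str):
                field = field.lower()   # case-insensitive ordering
            self.field = field
            self.cargo = cargo

    def sort_by_field(rows, field):
        # Accept both dicts and plain objects, as dictAwareSort() does.
        def value(row):
            if isinstance(row, dict):
                return row.get(field, "")
            return getattr(row, field, "")
        wrapped = [_Key(value(r), r) for r in rows]
        wrapped.sort(key=lambda w: w.field)
        return [w.cargo for w in wrapped]

    rows = [{"name": "Zebra"}, {"name": "ant"}]
    assert sort_by_field(rows, "name")[0]["name"] == "ant"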
path = ','.join(path) + path = ",".join(path) skinstool.addSkinSelection(skin, path) except ImportError as e: - if "Products.CMFCore.utils" in e.args: pass - else: raise + if "Products.CMFCore.utils" in e.args: + pass + else: + raise except AttributeError as e: - if "portal_skin" in e.args: pass - else: raise + if "portal_skin" in e.args: + pass + else: + raise InitializeClass(ZenTableManager) diff --git a/Products/ZenWidgets/ZenTableState.py b/Products/ZenWidgets/ZenTableState.py index 42e2a7bd0c..9c773eb2df 100644 --- a/Products/ZenWidgets/ZenTableState.py +++ b/Products/ZenWidgets/ZenTableState.py @@ -7,32 +7,29 @@ # ############################################################################## - -__doc__="""ZenTableState +"""ZenTableState Track the state of a given table. +""" -$Id: ZenTableState.py,v 1.3 2004/01/17 04:56:13 edahl Exp $""" - -__revision__ = "$Revision: 1.3 $"[11:-2] - -from AccessControl.class_init import InitializeClass from AccessControl import ClassSecurityInfo +from AccessControl.class_init import InitializeClass from DateTime.DateTime import DateTime from persistent.dict import PersistentDict + class ZenTableState: - defaultValue = "" # So that we don't have to clear the session + defaultValue = "" # So that we don't have to clear the session changesThatResetStart = [ "batchSize", "filter", "sortedHeader", "sortedSence", - "defaultValue" - "onlyMonitored" - ] + "defaultValue", + "onlyMonitored", + ] requestAtts = [ "batchSize", @@ -45,18 +42,18 @@ class ZenTableState: "URL", "defaultValue", "onlyMonitored", - "generate" - ] + "generate", + ] security = ClassSecurityInfo() - #this session info isn't anything worth protecting - security.setDefaultAccess('allow') + # this session info isn't anything worth protecting + security.setDefaultAccess("allow") def __init__(self, request, tableName, defaultBatchSize, **keys): self.URL = request.URL self.tableName = tableName self.sortedHeader = "primarySortKey" - self.sortedSence="asc" + self.sortedSence = "asc" self.sortRule = "cmp" self.onlyMonitored = 0 self.defaultBatchSize = defaultBatchSize @@ -71,8 +68,9 @@ def __init__(self, request, tableName, defaultBatchSize, **keys): self.abbrEndLabel = 5 self.abbrPadding = 5 self.abbrSeparator = ".." 
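The changesThatResetStart hunk above also changes the list's contents: the old code had no comma between "defaultValue" and "onlyMonitored", and adjacent string literals concatenate, so the two entries were silently merged into one item; the added trailing commas make them separate entries. A two-line demonstration of the pitfall:

    merged = [
        "defaultValue"      # missing comma: implicit string concatenation
        "onlyMonitored",
    ]
    assert merged == ["defaultValueonlyMonitored"]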
- self.abbrThresh = self.abbrStartLabel + \ - self.abbrEndLabel + self.abbrPadding + self.abbrThresh = ( + self.abbrStartLabel + self.abbrEndLabel + self.abbrPadding + ) self.tableClass = "tableheader" self.resetStart = False self.showAll = False @@ -102,100 +100,111 @@ def setTableStateFromKeys(self, keys): if key not in self.requestAtts: self.requestAtts.append(key) - def updateFromRequest(self, request): """update table state based on request""" - states = request.SESSION['zentablestates'] + states = request.SESSION["zentablestates"] if not isinstance(states, PersistentDict): - request.SESSION['zentablestates'] = PersistentDict(states) - request.SESSION['zentablestates']._p_changed = True + request.SESSION["zentablestates"] = PersistentDict(states) + request.SESSION["zentablestates"]._p_changed = True if self.URL != request.URL: self.batchSize = self.defaultBatchSize - self.start=0 - self.filter = '' - - # 'tableName' will be empty on GET requests, therefore we check for the 'showAll' option here - if request.get("showAll", False) or "showAll=true" in request.get("QUERY_STRING") or request.get("adapt", False or "adapt=false" in request.get("QUERY_STRING")): - if not request.get('tableName', None): + self.start = 0 + self.filter = "" + + # 'tableName' will be empty on GET requests, therefore we check for + # the 'showAll' option here. + if ( + request.get("showAll", False) + or "showAll=true" in request.get("QUERY_STRING") + or request.get( + "adapt", False or "adapt=false" in request.get("QUERY_STRING") + ) + ): + if not request.get("tableName", None): self.showAll = True self.start = 0 self.batchSize = 0 - # the batch size needs to be set to the total object/result count. - # we don't have the objects here, so we will set the batchSize - # where we do have the objects -- see buildPageNavigation() below. + # The batch size needs to be set to the total object/result + # count. We don't have the objects here, so we will set the + # batchSize where we do have the objects -- see + # buildPageNavigation() below. 
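updateFromRequest() above has to honour showAll both as a form field and as a raw QUERY_STRING substring, because tableName is empty on GET requests. A tiny helper expressing the same check (function and argument names are hypothetical):

    def flag_requested(form, query_string, name):
        # True when the flag is posted as a form field or appears
        # literally in the query string, e.g. "...&showAll=true".
        return bool(form.get(name)) or ("%s=true" % name) in query_string

    assert flag_requested({}, "tableName=deviceList&showAll=true", "showAll")
    assert not flag_requested({}, "tableName=deviceList", "showAll")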
- if request.get('tableName', None) != self.tableName: + if request.get("tableName", None) != self.tableName: return for attname in self.requestAtts: - if request.has_key(attname): - self.setTableState(attname, int(request[attname]) if attname == 'start' else request[attname], request=request) - if request.get("showAll", False) or "showAll=true" in request.get("QUERY_STRING"): + if request.has_key(attname): # noqa W601 + self.setTableState( + attname, + int(request[attname]) + if attname == "start" + else request[attname], + request=request, + ) + if request.get("showAll", False) or "showAll=true" in request.get( + "QUERY_STRING" + ): self.showAll = True self.start = 0 self.batchSize = 0 - if not request.has_key('onlyMonitored'): - self.setTableState('onlyMonitored', 0) - if request.get("first",False): + if not request.has_key("onlyMonitored"): # noqa W601 + self.setTableState("onlyMonitored", 0) + if request.get("first", False): self.resetStart = True elif request.get("last", False): - self.start=self.lastindex + self.start = self.lastindex elif request.get("next", False): np = self.start + self.batchSize - if np > self.lastindex: self.start = self.lastindex - else: self.start = np + if np > self.lastindex: + self.start = self.lastindex + else: + self.start = np elif request.get("prev", False): pp = self.start - self.batchSize - if pp < 0: self.start = 0 - else: self.start = pp - ourl = "/".join((request.URL,request.get("zenScreenName",""))) + if pp < 0: + self.start = 0 + else: + self.start = pp + ourl = "/".join((request.URL, request.get("zenScreenName", ""))) if self.resetStart or (self.URL != request.URL and self.URL != ourl): self.start = 0 self.resetStart = False - def getPageNavigation(self): return self.pagenav - def buildPageNavigation(self, objects): self.pagenav = [] - # this conditional is for setting the batchSize on a "showAll" - #if self.showAll: - # self.batchSize = len(objects) - # self.start = 0 - # self.showAll = False if self.batchSize == 0: return self.pagenav - lastindex=0 + lastindex = 0 for index in range(0, self.totalobjs, self.batchSize): pg = {} - pg['label'] = self._pageLabel(objects, index) - pg['index'] = index + pg["label"] = self._pageLabel(objects, index) + pg["index"] = index self.pagenav.append(pg) - lastindex=index + lastindex = index self.lastindex = lastindex - def _pageLabel(self, objects, index): - """make label for page navigation if field isn't sorted use page #""" + """Make label for page navigation if field isn't sorted use page #.""" pageLabel = "" # do not show the page label if there is only one page if self.totalobjs > self.batchSize: if self.sortedHeader: pageLabel = self._buildTextLabel(objects[index]) elif self.batchSize: - pageLabel = str(1+index/self.batchSize) + pageLabel = str(1 + index / self.batchSize) else: - pageLabel = '1' + pageLabel = "1" return pageLabel - def _buildTextLabel(self, item): startAbbr = "" endAbbr = "" attr = getattr(item, self.sortedHeader, self.defaultValue) - if callable(attr): attr = attr() + if callable(attr): + attr = attr() if isinstance(attr, DateTime) and not attr.millis(): label = self.defaultValue else: @@ -205,16 +214,17 @@ def _buildTextLabel(self, item): # ensuring that we are always working with a string label = str(label) if len(label) > self.abbrThresh: - startAbbr = label[:self.abbrStartLabel] + startAbbr = label[: self.abbrStartLabel] if self.abbrEndLabel > 0: - endAbbr = label[-self.abbrEndLabel:] + endAbbr = label[-self.abbrEndLabel :] label = "".join((startAbbr, self.abbrSeparator, endAbbr)) 
return label - - def setTableState(self, attname, value, default=None, reset=False, request=None): - if attname == 'batchSize': - if value in ['', '0']: + def setTableState( + self, attname, value, default=None, reset=False, request=None + ): + if attname == "batchSize": + if value in ["", "0"]: value = 0 else: # If given parameter is not numeric this will catch it @@ -226,20 +236,20 @@ def setTableState(self, attname, value, default=None, reset=False, request=None) # Restore whatever was the previous value value = getattr(self, attname, None) if request is not None: - # Set attribute in request to previous value so it gets stored properly + # Set attribute in request to previous value so it + # gets stored properly. request[attname] = value - if not hasattr(self, attname) and default != None: + if not hasattr(self, attname) and default is not None: setattr(self, attname, default) if reset and attname not in self.changesThatResetStart: self.changesThatResetStart.append(attname) if attname not in self.requestAtts: self.requestAtts.append(attname) - if value != None and getattr(self,attname, None) != value: + if value is not None and getattr(self, attname, None) != value: setattr(self, attname, value) if attname in self.changesThatResetStart: self.resetStart = True - return getattr(self,attname) - + return getattr(self, attname) def addFilterField(self, fieldName): """make sure we only add non-dup filterfields""" diff --git a/Products/ZenWidgets/ZenossPortlets/ZenossPortlets.py b/Products/ZenWidgets/ZenossPortlets/ZenossPortlets.py index 240e512537..610b008fc0 100644 --- a/Products/ZenWidgets/ZenossPortlets/ZenossPortlets.py +++ b/Products/ZenWidgets/ZenossPortlets/ZenossPortlets.py @@ -1,75 +1,82 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - import os -from Products.ZenModel.ZenossSecurity import * + +from Products.ZenModel.ZenossSecurity import ( + ZEN_COMMON, + ZEN_MANAGE_DMD, + ZEN_VIEW, +) + def _portletpath(*args): """ Shortcut, since these all live in the same directory. Portlet needs a path relative to $ZENHOME. 
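A condensed view of the batchSize handling in the setTableState() hunk above: an empty string or "0" means unbatched, and a non-numeric value falls back to the previously stored size instead of raising (the helper name below is hypothetical):

    def parse_batch_size(raw, previous):
        if raw in ("", "0"):
            return 0
        try:
            return int(raw)
        except (TypeError, ValueError):
            # Keep whatever was stored before, as setTableState() does.
            return previous

    assert parse_batch_size("0", 40) == 0
    assert parse_batch_size("25", 40) == 25
    assert parse_batch_size("banana", 40) == 40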
""" - return os.path.join('Products','ZenWidgets','ZenossPortlets', *args) + return os.path.join("Products", "ZenWidgets", "ZenossPortlets", *args) + portlets = [ { - 'sourcepath': _portletpath('HeartbeatsPortlet.js'), - 'id': 'HeartbeatsPortlet', - 'title': 'Daemon Processes Down', - 'permission': ZEN_MANAGE_DMD + "sourcepath": _portletpath("HeartbeatsPortlet.js"), + "id": "HeartbeatsPortlet", + "title": "Daemon Processes Down", + "permission": ZEN_MANAGE_DMD, }, { - 'sourcepath': _portletpath('GoogleMapsPortlet.js'), - 'id': 'GoogleMapsPortlet', - 'title': 'Google Maps', - 'permission': ZEN_VIEW + "sourcepath": _portletpath("GoogleMapsPortlet.js"), + "id": "GoogleMapsPortlet", + "title": "Google Maps", + "permission": ZEN_VIEW, }, { - 'sourcepath': _portletpath('SiteWindowPortlet.js'), - 'id': 'SiteWindowPortlet', - 'title': 'Site Window', - 'permission': ZEN_VIEW + "sourcepath": _portletpath("SiteWindowPortlet.js"), + "id": "SiteWindowPortlet", + "title": "Site Window", + "permission": ZEN_VIEW, }, { - 'sourcepath': _portletpath('DeviceIssuesPortlet.js'), - 'id': 'DeviceIssuesPortlet', - 'title': 'Device Issues', - 'permission': ZEN_COMMON + "sourcepath": _portletpath("DeviceIssuesPortlet.js"), + "id": "DeviceIssuesPortlet", + "title": "Device Issues", + "permission": ZEN_COMMON, }, { - 'sourcepath': _portletpath('TopLevelOrgsPortlet.js'), - 'id': 'TopLevelOrgsPortlet', - 'title': 'Top Level Organizers', - 'permission': ZEN_VIEW + "sourcepath": _portletpath("TopLevelOrgsPortlet.js"), + "id": "TopLevelOrgsPortlet", + "title": "Top Level Organizers", + "permission": ZEN_VIEW, }, { - 'sourcepath': _portletpath('WatchListPortlet.js'), - 'id': 'WatchListPortlet', - 'title': 'Watch List', - 'permission': ZEN_COMMON + "sourcepath": _portletpath("WatchListPortlet.js"), + "id": "WatchListPortlet", + "title": "Watch List", + "permission": ZEN_COMMON, }, { - 'sourcepath': _portletpath('productionStatePortlet.js'), - 'id': 'ProdStatePortlet', - 'title': 'Production States', - 'permission': ZEN_COMMON + "sourcepath": _portletpath("productionStatePortlet.js"), + "id": "ProdStatePortlet", + "title": "Production States", + "permission": ZEN_COMMON, }, { - 'sourcepath': _portletpath('userMessagesPortlet.js'), - 'id': 'UserMsgsPortlet', - 'title': 'Messages', - 'permission': ZEN_COMMON + "sourcepath": _portletpath("userMessagesPortlet.js"), + "id": "UserMsgsPortlet", + "title": "Messages", + "permission": ZEN_COMMON, }, ] + def register_default_portlets(portletmanager): for portlet in portlets: - if portletmanager.find(portlet['id']) is None: + if portletmanager.find(portlet["id"]) is None: portletmanager.register_portlet(**portlet) diff --git a/Products/ZenWidgets/ZenossPortlets/__init__.py b/Products/ZenWidgets/ZenossPortlets/__init__.py index de5b4971fc..8f3a86088f 100644 --- a/Products/ZenWidgets/ZenossPortlets/__init__.py +++ b/Products/ZenWidgets/ZenossPortlets/__init__.py @@ -1,11 +1,8 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - - - diff --git a/Products/ZenWidgets/__init__.py b/Products/ZenWidgets/__init__.py index 9d42478b9f..9331715c86 100644 --- a/Products/ZenWidgets/__init__.py +++ b/Products/ZenWidgets/__init__.py @@ -1,60 +1,51 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -"""__init__ - -Initializer for ZenTableManager - -$Id: __init__.py,v 1.3 2004/04/04 23:56:49 edahl Exp $""" - -__version__ = 0.5 -__revision__ = "$Revision: 1.3 $"[11:-2] - - +from Products.CMFCore.DirectoryView import registerDirectory from Products.Five.browser import BrowserView -from ZenTableManager import ZenTableManager -from ZenTableManager import manage_addZenTableManager -try: - from Products.CMFCore.DirectoryView import registerDirectory - registerDirectory('skins', globals()) -except ImportError: pass +from .ZenossPortlets.ZenossPortlets import register_default_portlets +from .ZenTableManager import manage_addZenTableManager, ZenTableManager + +registerDirectory("skins", globals()) -from ZenossPortlets.ZenossPortlets import register_default_portlets def update_portlets(app): + """Reread in portlet source on startup. + + If this is the initial load, and objects don't exist yet, don't do + anything. """ - Reread in portlet source on startup. If this is the initial load, and - objects don't exist yet, don't do anything. - """ - if hasattr(app, 'zport') and hasattr(app.zport, 'ZenPortletManager'): + if hasattr(app, "zport") and hasattr(app.zport, "ZenPortletManager"): register_default_portlets(app.zport.ZenPortletManager) for pack in app.zport.dmd.ZenPackManager.packs(): - for portlet in getattr(pack, 'register_portlets', lambda *x:())(): - if app.zport.ZenPortletManager.find(portlet['id']) is None: - app.zport.ZenPortletManager.register_extjsPortlet(**portlet) + for portlet in getattr(pack, "register_portlets", lambda *x: ())(): + if app.zport.ZenPortletManager.find(portlet["id"]) is None: + app.zport.ZenPortletManager.register_extjsPortlet( + **portlet + ) + def initialize(registrar): registrar.registerClass( ZenTableManager, permission="Add ZenTableManager", - constructors = (manage_addZenTableManager,), - icon = "ZenTableManager_icon.gif" + constructors=(manage_addZenTableManager,), + icon="ZenTableManager_icon.gif", ) + def registerPortlets(event): - """ - Handler for IZopeApplicationOpenedEvent which registers portlets. - """ + """Handler for IZopeApplicationOpenedEvent which registers portlets.""" update_portlets(event.app) + class ExtJSShortcut(BrowserView): def __getitem__(self, name): - return self.context.unrestrictedTraverse('++resource++extjs')[name] + return self.context.unrestrictedTraverse("++resource++extjs")[name] diff --git a/Products/ZenWidgets/browser/Portlets.py b/Products/ZenWidgets/browser/Portlets.py index 8428961b26..f8910a1ee6 100644 --- a/Products/ZenWidgets/browser/Portlets.py +++ b/Products/ZenWidgets/browser/Portlets.py @@ -1,38 +1,36 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. 
-# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -import re -import json +import logging from Products.Five.browser import BrowserView from Products.AdvancedQuery import Eq, In, And - -from Products.ZenUtils.Utils import relative_time -from Products.Zuul import getFacade -from Products.ZenEvents.HeartbeatUtils import getHeartbeatObjects from zenoss.protocols.services import ServiceException from zenoss.protocols.services.zep import ZepConnectionError + +from Products.ZenEvents.browser.EventPillsAndSummaries import ( + getDashboardObjectsEventSummary, + getEventPillME, + ObjectsEventSummary, +) +from Products.ZenEvents.HeartbeatUtils import getHeartbeatObjects +from Products.ZenModel.Device import Device +from Products.ZenModel.ZenossSecurity import ZEN_VIEW from Products.ZenUtils.guid.interfaces import IGUIDManager from Products.ZenUtils.jsonutils import json -from Products.ZenUtils.Utils import nocache, formreq, extractPostContent -from Products.ZenWidgets import messaging -from Products.ZenModel.Device import Device -from Products.ZenModel.ZenossSecurity import * -from Products.ZenEvents.browser.EventPillsAndSummaries import \ - getDashboardObjectsEventSummary, \ - ObjectsEventSummary, \ - getEventPillME +from Products.ZenUtils.Utils import nocache, formreq, relative_time +from Products.Zuul import getFacade from Products.Zuul.catalog.interfaces import IModelCatalogTool -import logging -log = logging.getLogger('zen.portlets') +from .. import messaging + +log = logging.getLogger("zen.portlets") def zepConnectionError(retval=None): @@ -40,21 +38,28 @@ def outer(func): def inner(self, *args, **kwargs): try: return func(self, *args, **kwargs) - except ZepConnectionError as e: - msg = 'Connection refused. Check zeneventserver status on Services' - messaging.IMessageSender(self.context).sendToBrowser("ZEP connection error", - msg, - priority=messaging.CRITICAL, - sticky=True) + except ZepConnectionError: + msg = ( + "Connection refused. Check zeneventserver status on " + 'Services' + ) + messaging.IMessageSender(self.context).sendToBrowser( + "ZEP connection error", + msg, + priority=messaging.CRITICAL, + sticky=True, + ) log.warn("Could not connect to ZEP") return retval + return inner + return outer + class TopLevelOrganizerPortletView(ObjectsEventSummary): - """ - Return JSON event summaries for a root organizer. - """ + """Return JSON event summaries for a root organizer.""" + @nocache @formreq def __call__(self, dataRoot): @@ -70,13 +75,14 @@ class ProductionStatePortletView(BrowserView): Return a map of device to production state in a format suitable for a YUI data table. """ + @nocache @formreq def __call__(self, *args, **kwargs): return self.getDevProdStateJSON(*args, **kwargs) @json - def getDevProdStateJSON(self, prodStates=['Maintenance']): + def getDevProdStateJSON(self, prodStates=["Maintenance"]): """ Return a map of device to production state in a format suitable for a YUI data table. 
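The zepConnectionError decorator above is a parameterized fallback: on ZepConnectionError it notifies the browser, logs a warning, and returns a caller-supplied default instead of propagating. A generic, self-contained version of the same shape (the exception class and names below are hypothetical, not Zenoss APIs):

    import functools
    import logging

    log = logging.getLogger("zen.portlets.sketch")

    class BackendUnavailable(Exception):
        """Stand-in for ZepConnectionError."""

    def fallback_on(exc_type, retval=None):
        def outer(func):
            @functools.wraps(func)
            def inner(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except exc_type:
                    log.warning("%s failed; returning %r", func.__name__, retval)
                    return retval
            return inner
        return outer

    @fallback_on(BackendUnavailable, retval={"columns": [], "data": []})
    def load_issues():
        raise BackendUnavailable("zeneventserver is down")

    assert load_issues() == {"columns": [], "data": []}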
@@ -101,20 +107,27 @@ def getProdStateInt(prodStateString): numericProdStates = [getProdStateInt(p) for p in prodStates] catalog = IModelCatalogTool(self.context.getPhysicalRoot().zport.dmd) - query = In('productionState', numericProdStates) - - query = And(query, Eq('objectImplements', 'Products.ZenModel.Device.Device')) - objects = list(catalog.search(query=query, orderby='id', fields="uuid")) + query = In("productionState", numericProdStates) + + query = And( + query, Eq("objectImplements", "Products.ZenModel.Device.Device") + ) + objects = list( + catalog.search(query=query, orderby="id", fields="uuid") + ) devs = (x.getObject() for x in objects) - mydict = {'columns':['Device', 'Prod State'], 'data':[]} + mydict = {"columns": ["Device", "Prod State"], "data": []} for dev in devs: - if not self.context.checkRemotePerm(ZEN_VIEW, dev): continue - mydict['data'].append({ - 'Device' : dev.getPrettyLink(), - 'Prod State' : dev.getProdState() - }) - if len(mydict['data'])>=100: + if not self.context.checkRemotePerm(ZEN_VIEW, dev): + continue + mydict["data"].append( + { + "Device": dev.getPrettyLink(), + "Prod State": dev.getProdState(), + } + ) + if len(mydict["data"]) >= 100: break return mydict @@ -132,6 +145,7 @@ class WatchListPortletView(BrowserView): of the table @rtype: string """ + @nocache @formreq def __call__(self, *args, **kwargs): @@ -143,24 +157,27 @@ def getEntityListEventSummary(self, entities=None): entities = [] elif isinstance(entities, basestring): entities = [entities] + def getob(e): e = str(e) try: - if not e.startswith('/zport/dmd'): - bigdev = '/zport/dmd' + e + if not e.startswith("/zport/dmd"): + bigdev = "/zport/dmd" + e obj = self.context.dmd.unrestrictedTraverse(bigdev) except (AttributeError, KeyError): obj = self.context.dmd.Devices.findDevice(e) - if self.context.has_permission("View", obj): return obj - entities = filter(lambda x:x is not None, map(getob, entities)) + if self.context.has_permission("View", obj): + return obj + + entities = filter(lambda x: x is not None, map(getob, entities)) return getDashboardObjectsEventSummary( - self.context.dmd.ZenEventManager, entities) + self.context.dmd.ZenEventManager, entities + ) class DeviceIssuesPortletView(BrowserView): - """ - A list of devices with issues. - """ + """A list of devices with issues.""" + @nocache def __call__(self): return self.getDeviceIssuesJSON() @@ -179,18 +196,17 @@ def getDeviceIssuesJSON(self): {'Device':'', 'Events':'
'}, ]}" """ - mydict = {'columns':[], 'data':[]} - mydict['columns'] = ['Device', 'Events'] + mydict = {"columns": [], "data": []} + mydict["columns"] = ["Device", "Events"] deviceinfo = self.getDeviceDashboard() for alink, pill in deviceinfo: - mydict['data'].append({'Device':alink, - 'Events':pill}) + mydict["data"].append({"Device": alink, "Events": pill}) return mydict @zepConnectionError([]) def getDeviceDashboard(self): """return device info for bad device to dashboard""" - zep = getFacade('zep') + zep = getFacade("zep") manager = IGUIDManager(self.context.dmd) deviceSeverities = zep.getDeviceIssuesDict() zem = self.context.dmd.ZenEventManager @@ -199,49 +215,62 @@ def getDeviceDashboard(self): for uuid in deviceSeverities.keys(): uuid_data = {} - uuid_data['uuid'] = uuid + uuid_data["uuid"] = uuid severities = deviceSeverities[uuid] try: - uuid_data['severities'] = dict((zep.getSeverityName(sev).lower(), counts) for (sev, counts) in severities.iteritems()) + uuid_data["severities"] = dict( + (zep.getSeverityName(sev).lower(), counts) + for (sev, counts) in severities.iteritems() + ) except ServiceException: continue bulk_data.append(uuid_data) - bulk_data.sort(key=lambda x:(x['severities']['critical'], x['severities']['error'], x['severities']['warning']), reverse=True) + bulk_data.sort( + key=lambda x: ( + x["severities"]["critical"], + x["severities"]["error"], + x["severities"]["warning"], + ), + reverse=True, + ) devices_found = 0 MAX_DEVICES = 100 devdata = [] for data in bulk_data: - uuid = data['uuid'] - severities = data['severities'] + uuid = data["uuid"] + severities = data["severities"] dev = manager.getObject(uuid) if dev and isinstance(dev, Device): - if (not zem.checkRemotePerm(ZEN_VIEW, dev) + if ( + not zem.checkRemotePerm(ZEN_VIEW, dev) or dev.getProductionState() < zem.prodStateDashboardThresh - or dev.priority < zem.priorityDashboardThresh): + or dev.priority < zem.priorityDashboardThresh + ): continue alink = dev.getPrettyLink() pill = getEventPillME(dev, severities=severities) - evts = [alink,pill] + evts = [alink, pill] devdata.append(evts) devices_found = devices_found + 1 if devices_found >= MAX_DEVICES: break return devdata -heartbeat_columns = ['Host', 'Daemon Process', 'Seconds Down'] + +heartbeat_columns = ["Host", "Daemon Process", "Seconds Down"] + class HeartbeatPortletView(BrowserView): - """ - Heartbeat issues in YUI table form, for the dashboard portlet - """ + """Heartbeat issues in YUI table form, for the dashboard portlet.""" + @nocache def __call__(self): return self.getHeartbeatIssuesJSON() - @zepConnectionError({'columns': heartbeat_columns, 'data':[]}) + @zepConnectionError({"columns": heartbeat_columns, "data": []}) @json def getHeartbeatIssuesJSON(self): """ @@ -254,15 +283,15 @@ def getHeartbeatIssuesJSON(self): {'Device':'', 'Daemon':'zenhub', 'Seconds':10} ]}" """ - data = getHeartbeatObjects(deviceRoot=self.context.dmd.Devices, - keys=heartbeat_columns) - return {'columns': heartbeat_columns, 'data': data} + data = getHeartbeatObjects( + deviceRoot=self.context.dmd.Devices, keys=heartbeat_columns + ) + return {"columns": heartbeat_columns, "data": data} class UserMessagesPortletView(BrowserView): - """ - User messages in YUI table form, for the dashboard portlet. 
- """ + """User messages in YUI table form, for the dashboard portlet.""" + @nocache @json def __call__(self): @@ -276,20 +305,24 @@ def __call__(self): {'Device':'', 'Daemon':'zenhub', 'Seconds':10} ]}" """ - ICONS = ['/zport/dmd/img/agt_action_success-32.png', - '/zport/dmd/img/messagebox_warning-32.png', - '/zport/dmd/img/agt_stop-32.png'] + ICONS = [ + "/zport/dmd/img/agt_action_success-32.png", + "/zport/dmd/img/messagebox_warning-32.png", + "/zport/dmd/img/agt_stop-32.png", + ] msgbox = messaging.IUserMessages(self.context) msgs = msgbox.get_messages() - cols = ['Message'] + cols = ["Message"] res = [] for msg in msgs: - res.append(dict( - title = msg.title, - imgpath = ICONS[msg.priority], - body = msg.body, - ago = relative_time(msg.timestamp), - deletelink = msg.absolute_url_path() + '/delMsg' - )) + res.append( + dict( + title=msg.title, + imgpath=ICONS[msg.priority], + body=msg.body, + ago=relative_time(msg.timestamp), + deletelink=msg.absolute_url_path() + "/delMsg", + ) + ) res.reverse() - return { 'columns': cols, 'data': res } + return {"columns": cols, "data": res} diff --git a/Products/ZenWidgets/browser/__init__.py b/Products/ZenWidgets/browser/__init__.py index de5b4971fc..8f3a86088f 100644 --- a/Products/ZenWidgets/browser/__init__.py +++ b/Products/ZenWidgets/browser/__init__.py @@ -1,11 +1,8 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - - - diff --git a/Products/ZenWidgets/browser/messaging.py b/Products/ZenWidgets/browser/messaging.py index bf5a324690..c12ccf328f 100644 --- a/Products/ZenWidgets/browser/messaging.py +++ b/Products/ZenWidgets/browser/messaging.py @@ -1,42 +1,46 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - from Products.Five.browser import BrowserView from Products.ZenUtils.jsonutils import json -from Products.ZenModel.ZenossSecurity import * -from Products.ZenWidgets.interfaces import IUserMessages, IBrowserMessages -from Products.ZenWidgets import messaging + +from .. import messaging +from ..interfaces import IBrowserMessages, IUserMessages + class UserMessages(BrowserView): """ Delivers up user messages for the current user to the client-side YAHOO.zenoss.Messenger. 
""" + @json def __call__(self): messages = IUserMessages(self.context).get_unread() messages.extend(IBrowserMessages(self.context).get_unread()) - messages.sort(key=lambda x:x.timestamp) + messages.sort(key=lambda x: x.timestamp) result = [] for message in messages: - result.append(dict( - sticky=message.priority>=messaging.CRITICAL and True or False, - image=message.image, - title=message.title, - body=message.body, - priority=message.priority - )) + result.append( + dict( + sticky=message.priority >= messaging.CRITICAL + and True + or False, + image=message.image, + title=message.title, + body=message.body, + priority=message.priority, + ) + ) message.mark_as_read() - result = {'totalRecords':len(result), - 'messages':result} + result = {"totalRecords": len(result), "messages": result} return result diff --git a/Products/ZenWidgets/browser/quickstart/__init__.py b/Products/ZenWidgets/browser/quickstart/__init__.py index de5b4971fc..8f3a86088f 100644 --- a/Products/ZenWidgets/browser/quickstart/__init__.py +++ b/Products/ZenWidgets/browser/quickstart/__init__.py @@ -1,11 +1,8 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - - - diff --git a/Products/ZenWidgets/browser/quickstart/userViews.py b/Products/ZenWidgets/browser/quickstart/userViews.py index 4a5cbd3a50..e5dea600ee 100644 --- a/Products/ZenWidgets/browser/quickstart/userViews.py +++ b/Products/ZenWidgets/browser/quickstart/userViews.py @@ -1,32 +1,33 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - import logging -log = logging.getLogger("zen.widgets.userviews") +from Products.CMFCore.utils import getToolByName from Products.Five.browser import BrowserView from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile + +from Products.ZenModel.Quickstart import getTopQuickstartStep from Products.ZenUtils import Ext from Products.ZenUtils.csrf import check_csrf_token -from Products.CMFCore.utils import getToolByName -from Products.ZenModel.Quickstart import getTopQuickstartStep + +log = logging.getLogger("zen.widgets.userviews") class SetAdminPasswordException(Exception): - """There was a problem setting the admin password""" + """There was a problem setting the admin password.""" + class CreateUserView(BrowserView): - """ - Creates the initial user and sets the admin password. 
- """ - __call__ = ZopeTwoPageTemplateFile('templates/createuser.pt') + """Creates the initial user and sets the admin password.""" + + __call__ = ZopeTwoPageTemplateFile("templates/createuser.pt") @Ext.form_action def createUser(self): @@ -43,35 +44,47 @@ def createUser(self): userPassword = self.request.form.get("password1") emailAddress = self.request.form.get("emailAddress") - zenUsers = getToolByName(self.context, 'ZenUsers') + zenUsers = getToolByName(self.context, "ZenUsers") # Set admin password try: - admin = zenUsers.getUserSettings('admin') - admin.manage_editUserSettings(password=adminPassword, - sndpassword=adminPassword, - roles=('ZenManager', 'Manager'), - oldpassword='zenoss') + admin = zenUsers.getUserSettings("admin") + admin.manage_editUserSettings( + password=adminPassword, + sndpassword=adminPassword, + roles=("ZenManager", "Manager"), + oldpassword="zenoss", + ) except Exception: log.exception("Failed to set admin password") - response.error('admin-password1', - "There was a problem setting the admin password.") + response.error( + "admin-password1", + "There was a problem setting the admin password.", + ) - if not zenUsers.checkValidId(userName) == True: - response.error('username', 'That username already exists.') + if not zenUsers.checkValidId(userName) is True: + response.error("username", "That username already exists.") else: - ret = zenUsers.manage_addUser(userName, userPassword, - ('Manager',), REQUEST=None, email=emailAddress) + ret = zenUsers.manage_addUser( + userName, + userPassword, + ("Manager",), + REQUEST=None, + email=emailAddress, + ) if ret is None: - response.error('username', - 'We were unable to add a user at this time.' - ' Check your installation.') + response.error( + "username", + "We were unable to add a user at this time." 
+ " Check your installation.", + ) if not response.has_errors(): # Log out, so the form can log us in as the new user - acl_users = self.context.getPhysicalRoot().acl_users + _ = self.context.getPhysicalRoot().acl_users self.context.acl_users.resetCredentials( - self.request, self.request.response) + self.request, self.request.response + ) # Don't run the quickstart next time self.context.dmd._rq = True diff --git a/Products/ZenWidgets/browser/quickstart/views.py b/Products/ZenWidgets/browser/quickstart/views.py index 6faa14c13c..c6ec4a7a4f 100644 --- a/Products/ZenWidgets/browser/quickstart/views.py +++ b/Products/ZenWidgets/browser/quickstart/views.py @@ -7,55 +7,61 @@ # ############################################################################## - -import re -import logging import cgi +import logging +import re + from Acquisition import aq_base from Products.Five.browser import BrowserView from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile + +from Products.ZenMessaging.audit import audit from Products.ZenModel.IpNetwork import AutoDiscoveryJob -from Products.ZenWidgets.messaging import IMessageSender from Products.ZenUtils import Ext from Products.ZenUtils.jsonutils import json -from Products.ZenMessaging.audit import audit -_is_network = lambda x: bool(re.compile(r'^(\d+\.){3}\d+\/\d+$').search(x)) -_is_range = lambda x: bool(re.compile(r'^(\d+\.){3}\d+\-\d+$').search(x)) +from ...messaging import IMessageSender + log = logging.getLogger("zen.quickstart") + +def _is_network(x): + return bool(re.compile(r"^(\d+\.){3}\d+\/\d+$").search(x)) + + +def _is_range(x): + return bool(re.compile(r"^(\d+\.){3}\d+\-\d+$").search(x)) + + class QuickstartBase(BrowserView): - """ - Standard macros for the quickstart. - """ - template = ZopeTwoPageTemplateFile('templates/quickstart_macros.pt') + """Standard macros for the quickstart.""" + + template = ZopeTwoPageTemplateFile("templates/quickstart_macros.pt") def __getitem__(self, key): return self.template.macros[key] class OutlineView(BrowserView): - """ - Displays the steps the user will soon be completing. The anticipation! - """ - __call__ = ZopeTwoPageTemplateFile('templates/outline.pt') + """Displays the steps the user will soon be completing.""" + + __call__ = ZopeTwoPageTemplateFile("templates/outline.pt") class CreateUserView(BrowserView): - """ - Creates the initial user and sets the admin password. - """ - __call__ = ZopeTwoPageTemplateFile('templates/createuser.pt') + """Creates the initial user and sets the admin password.""" + + __call__ = ZopeTwoPageTemplateFile("templates/createuser.pt") class DeviceAddView(BrowserView): - """ - Specify devices to be added. - """ + """Specify devices to be added.""" + @property def hasLDAPInstalled(self): try: - import ZenPacks.zenoss.LDAPAuthenticator + import ZenPacks.zenoss.LDAPAuthenticator # noqa F401 + # return javascript true/false return "true" except ImportError: @@ -67,19 +73,19 @@ def default_communities(self): Format the value of Devices.Discovered.zSnmpCommunities for a textarea """ devclass = self.context.dmd.Devices.Discovered.primaryAq() - return '\n'.join(devclass.zSnmpCommunities) + return "\n".join(devclass.zSnmpCommunities) def _assemble_types_list(self): """ Walks all device classes building a list of description/protocol pairs. 
""" - ALLOWED_PROTOCOLS = ('SSH', 'SNMP', 'WMI', 'WinRM') + ALLOWED_PROTOCOLS = ("SSH", "SNMP", "WMI", "WinRM") devclass = self.context.dmd.Devices orgs = devclass.getSubOrganizers() types = [] for org in orgs: # Skip it if it doesn't have types registered - if not hasattr(aq_base(org), 'devtypes') or not org.devtypes: + if not hasattr(aq_base(org), "devtypes") or not org.devtypes: continue for t in org.devtypes: try: @@ -93,16 +99,23 @@ def _assemble_types_list(self): # special case for migrating from WMI to WinRM so we # can allow the zenpack to be backwards compatible - if org.getOrganizerName() == '/Server/Microsoft/Windows' and ptcl == 'WMI': + if ( + org.getOrganizerName() == "/Server/Microsoft/Windows" + and ptcl == "WMI" + ): ptcl = "WinRM" # We only care about orgs with acceptable protocols - if ptcl not in ALLOWED_PROTOCOLS: continue + if ptcl not in ALLOWED_PROTOCOLS: + continue types.append((org.getOrganizerName(), desc, ptcl)) return types @json def collectors(self): - return [[name] for name in self.context.dmd.Monitors.getPerformanceMonitorNames()] + return [ + [name] + for name in self.context.dmd.Monitors.getPerformanceMonitorNames() + ] @json def device_types(self): @@ -115,12 +128,16 @@ def device_types(self): appropriate ZenPack installed?). """ # Turn them into the dictionary format expected - types = {'win':[], 'ssh':[], 'snmp':[], 'winrm': []} + types = {"win": [], "ssh": [], "snmp": [], "winrm": []} for t in self._assemble_types_list(): - if t[2]=='WMI': types['win'].append(t) - elif t[2]=='SNMP': types['snmp'].append(t) - elif t[2]=='SSH': types['ssh'].append(t) - elif t[2]=='WinRM': types['win'].append(t) + if t[2] == "WMI": + types["win"].append(t) + elif t[2] == "SNMP": + types["snmp"].append(t) + elif t[2] == "SSH": + types["ssh"].append(t) + elif t[2] == "WinRM": + types["win"].append(t) def dev_class_exists(path): """ @@ -128,8 +145,7 @@ def dev_class_exists(path): exists. """ try: - self.context.unrestrictedTraverse( - '/zport/dmd/Devices' + path) + self.context.unrestrictedTraverse("/zport/dmd/Devices" + path) except AttributeError: return False else: @@ -140,10 +156,13 @@ def format_type(credtype, classpath, description, protocol): Turn information representing a device class into a dictionary of the format our ComboBox expects. 
""" - value = '%s_%s' % (classpath, credtype) - return dict(value=value, - shortdesc="%s (%s)" % (description, protocol), - description=description, protocol=protocol) + value = "%s_%s" % (classpath, credtype) + return dict( + value=value, + shortdesc="%s (%s)" % (description, protocol), + description=description, + protocol=protocol, + ) # Iterate over all types response = [] @@ -155,148 +174,184 @@ def format_type(credtype, classpath, description, protocol): response.append(format_type(credtype, *devtype)) # Sort alphabetically by description - response.sort(key=lambda x:x['description']) + response.sort(key=lambda x: x["description"]) # Final response needs an object under a defined root, in this case # "types" return dict(types=response) - @Ext.form_action def autodiscovery(self): response = Ext.FormResponse() - submitted = self.request.form.get('network', []) + submitted = self.request.form.get("network", []) if isinstance(submitted, basestring): submitted = [submitted] zProperties = { - 'zCommandUsername': self.request.form.get('sshusername'), - 'zCommandPassword': self.request.form.get('sshpass'), - 'zWinRMUser': self.request.form.get('winusername'), - 'zWinRMPassword': self.request.form.get('winpass'), - 'zSnmpCommunities': self.request.form.get('snmpcommunities').splitlines() + "zCommandUsername": self.request.form.get("sshusername"), + "zCommandPassword": self.request.form.get("sshpass"), + "zWinRMUser": self.request.form.get("winusername"), + "zWinRMPassword": self.request.form.get("winpass"), + "zSnmpCommunities": self.request.form.get( + "snmpcommunities" + ).splitlines(), } - collector = self.request.form.get('autodiscovery_collector', 'localhost') + collector = self.request.form.get( + "autodiscovery_collector", "localhost" + ) # Split rows into networks and ranges nets = [] ranges = [] for row in submitted: - if _is_network(row): nets.append(row) - elif _is_range(row): ranges.append(row) + if _is_network(row): + nets.append(row) + elif _is_range(row): + ranges.append(row) if not nets and not ranges: - response.error('network', - 'You must enter at least one network or IP range.') + response.error( + "network", "You must enter at least one network or IP range." + ) if nets: for net in nets: # Make the network if it doesn't exist, so zendisc has # something to discover - _n = self.context.dmd.Networks.createNet(net) + _ = self.context.dmd.Networks.createNet(net) try: - netdesc = ("network %s" % nets[0] if len(nets)==1 - else "%s networks" % len(nets)) + netdesc = ( + "network %s" % nets[0] + if len(nets) == 1 + else "%s networks" % len(nets) + ) self.context.JobManager.addJob( AutoDiscoveryJob, description="Discover %s" % netdesc, kwargs=dict( - nets=nets, - zProperties=zProperties, - collector=collector - ) + nets=nets, zProperties=zProperties, collector=collector + ), ) except Exception as e: log.exception(e) - response.error('network', 'There was an error scheduling this ' - 'job. Please check your installation and try ' - 'again.') + response.error( + "network", + "There was an error scheduling this job. 
" + "Please check your installation and try again.", + ) else: IMessageSender(self.context).sendToUser( - 'Autodiscovery Task Created', - 'Discovery of the following networks is in progress: %s' % ( - ', '.join(nets)) + "Autodiscovery Task Created", + "Discovery of the following networks is in progress: %s" + % (", ".join(nets)), ) if ranges: # Ranges can just be sent to zendisc, as they are merely sets of # IPs try: - rangedesc = ("IP range %s" % ranges[0] - if len(ranges)==1 - else "%s IP ranges" % len(ranges)) + rangedesc = ( + "IP range %s" % ranges[0] + if len(ranges) == 1 + else "%s IP ranges" % len(ranges) + ) self.context.JobManager.addJob( AutoDiscoveryJob, description="Discover %s" % rangedesc, kwargs=dict( ranges=ranges, zProperties=zProperties, - collector=collector - ) + collector=collector, + ), ) except Exception as e: log.exception(e) - response.error('network', 'There was an error scheduling this ' - 'job. Please check your installation and try ' - 'again.') + response.error( + "network", + "There was an error scheduling this job. " + "Please check your installation and try again.", + ) else: IMessageSender(self.context).sendToUser( - 'Autodiscovery Task Created', - 'Discovery of the following IP ranges is in progress: %s' % ( - ', '.join(ranges)) + "Autodiscovery Task Created", + "Discovery of the following IP ranges is in progress: %s" + % (", ".join(ranges)), ) - audit('UI.Device.Autodiscovery', networks=','.join(nets), ipRanges=','.join(ranges)) - response.redirect('/zport/dmd') + audit( + "UI.Device.Autodiscovery", + networks=",".join(nets), + ipRanges=",".join(ranges), + ) + response.redirect("/zport/dmd") return response - @Ext.form_action def manual(self): # Pull all the device name keys response = Ext.FormResponse() - devs = filter(lambda x:x.startswith('device_'), - self.request.form.keys()) + devs = filter( + lambda x: x.startswith("device_"), self.request.form.keys() + ) # Make sure we have at least one device name - devnames = filter(lambda x:bool(self.request.form.get(x)), devs) + devnames = filter(lambda x: bool(self.request.form.get(x)), devs) if not devnames: - response.error('device_0', - 'You must enter at least one hostname/IP.') + response.error( + "device_0", "You must enter at least one hostname/IP." 
+ ) return response # Create jobs based on info passed for k in devs: # Ignore empty device names - if not self.request.form.get(k): continue - idx = k.split('_')[1] + if not self.request.form.get(k): + continue + idx = k.split("_")[1] devclass, type_ = self.request.form.get( - 'deviceclass_%s' % idx).split('_') - collector = self.request.form.get('collector_' + str(idx), 'localhost') + "deviceclass_%s" % idx + ).split("_") + collector = self.request.form.get( + "collector_" + str(idx), "localhost" + ) # Set zProps based on type - if type_=='ssh': + if type_ == "ssh": zProps = { - 'zCommandUsername': self.request.form.get('sshuser_%s' % idx), - 'zCommandPassword': self.request.form.get( - 'sshpass_%s' % idx), + "zCommandUsername": self.request.form.get( + "sshuser_%s" % idx + ), + "zCommandPassword": self.request.form.get( + "sshpass_%s" % idx + ), } - elif type_=='win': + elif type_ == "win": zProps = { - 'zWinRMUser': self.request.form.get('winuser_%s' % idx), - 'zWinRMPassword': self.request.form.get('winpass_%s' % idx), + "zWinRMUser": self.request.form.get("winuser_%s" % idx), + "zWinRMPassword": self.request.form.get( + "winpass_%s" % idx + ), } - elif type_=='snmp': + elif type_ == "snmp": zProps = { - 'zSnmpCommunities': self.request.form.get( - 'snmpcomm_%s' % idx + "zSnmpCommunities": self.request.form.get( + "snmpcomm_%s" % idx ).splitlines() } deviceName = self.request.form.get(k) perfConf = self.context.Monitors.getPerformanceMonitor(collector) - perfConf.addCreateDeviceJob(deviceName=deviceName, performanceMonitor=collector, - devicePath=devclass, zProperties=zProps, discoverProto='auto') - deviceClassUid = '/Devices' + devclass - deviceUid = '/'.join([deviceClassUid, 'devices', deviceName]) - audit('UI.Device.Add', deviceUid, deviceClass=deviceClassUid, model=True) + perfConf.addCreateDeviceJob( + deviceName=deviceName, + performanceMonitor=collector, + devicePath=devclass, + zProperties=zProps, + discoverProto="auto", + ) + deviceClassUid = "/Devices" + devclass + deviceUid = "/".join([deviceClassUid, "devices", deviceName]) + audit( + "UI.Device.Add", + deviceUid, + deviceClass=deviceClassUid, + model=True, + ) devnames = [self.request.form.get(dev) for dev in devs] IMessageSender(self.context).sendToUser( - 'Devices Added', - 'Modeling of the following devices has been scheduled: %s' % ( - cgi.escape(', '.join(filter(None, devnames))) - ) + "Devices Added", + "Modeling of the following devices has been scheduled: %s" + % (cgi.escape(", ".join(filter(None, devnames)))), ) - response.redirect('/zport/dmd') + response.redirect("/zport/dmd") return response diff --git a/Products/ZenWidgets/interfaces.py b/Products/ZenWidgets/interfaces.py index 7ef424fdc1..dc0935cffc 100644 --- a/Products/ZenWidgets/interfaces.py +++ b/Products/ZenWidgets/interfaces.py @@ -1,22 +1,23 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - -from zope.interface import Interface, Attribute from zope.container.interfaces import IContained, IContainer +from zope.interface import Interface, Attribute class IMessage(IContained): + """A single message. + + Messages are stored in user-specific MessageQueue objects and in the + session object. """ - A single message. 
Messages are stored in user-specific MessageQueue objects - and in the session object. - """ + title = Attribute("Title of the message") body = Attribute("Body of the message") image = Attribute("Optional path to image to be displayed") @@ -25,28 +26,24 @@ class IMessage(IContained): sticky = Attribute("Explicitly designate stickiness") def delete(): - """ - Delete this message from any queues in which it exists. - """ + """Delete this message from any queues in which it exists.""" + def mark_as_read(): - """ - Mark this message as read. - """ + """Mark this message as read.""" class IMessageSender(Interface): - """ - Something able to send messages. - """ + """Something able to send messages.""" + def sendToBrowser(title, body, priority, image=None, sticky=None): - """ - Create a message and store it on the request object. - """ + """Create a message and store it on the request object.""" + def sendToUser(title, body, priority, image=None, user=None): """ Create a message and store it in the L{IMessageQueue} of the user specified. If no user is specified, use the queue of the current user. """ + def sendToAll(title, body, priority, image=None): """ For eash user in the system, create an identical message and store it @@ -55,33 +52,24 @@ def sendToAll(title, body, priority, image=None): class IMessageQueue(IContainer): - """ - Marker interface for a message container. - """ + """Marker interface for a message container.""" class IMessageBox(Interface): - """ - Something that can provide messages. - """ + """Something that can provide messages.""" + messagebox = Attribute("The source of IMessage objects.") + def get_messages(): - """ - Return all messages. - """ + """Return all messages.""" + def get_unread(): - """ - Return all messages that have not been marked as read. - """ + """Return all messages that have not been marked as read.""" class IUserMessages(IMessageBox): - """ - Object that is able to provide IMessage objects from a user queue. - """ + """Object that is able to provide IMessage objects from a user queue.""" class IBrowserMessages(IMessageBox): - """ - Object that is able to provide IMessage objects from the request. - """ + """Object that is able to provide IMessage objects from the request.""" diff --git a/Products/ZenWidgets/messaging.py b/Products/ZenWidgets/messaging.py index 9b7370b1fc..5b4dde8f4a 100644 --- a/Products/ZenWidgets/messaging.py +++ b/Products/ZenWidgets/messaging.py @@ -1,32 +1,40 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## + import cgi import time -from zope.interface import implements from Products.CMFCore.utils import getToolByName -from Products.ZenRelations.utils import ZenRelationshipNameChooser -from Products.ZenWidgets.interfaces import * +from zope.interface import implementer +from Products.ZenRelations.utils import ZenRelationshipNameChooser +from Products.ZenWidgets.interfaces import ( + IBrowserMessages, + IMessage, + IMessageBox, + IMessageSender, + IUserMessages, +) # Constants representing priorities. # Parallel definitions exist in zenoss.js. -INFO = 0 -WARNING = 1 +INFO = 0 +WARNING = 1 CRITICAL = 2 + +@implementer(IMessage) class BrowserMessage(object): + """A single message. 
+ + Messages are stored on UserSettings and in the session object. """ - A single message. Messages are stored on UserSettings and in the session - object. - """ - implements(IMessage) __parent__ = None title = None @@ -36,8 +44,7 @@ class BrowserMessage(object): _read = False def __init__(self, title, body, priority=INFO, image=None, sticky=None): - """ - Initialization method. + """Initialize a BrowserMessage instance. @param title: The message title @type title: str @@ -56,12 +63,13 @@ def __init__(self, title, body, priority=INFO, image=None, sticky=None): self.sticky = sticky def delete(self): - """ - Delete this message from the system. + """Delete this message from the system. """ self._read = True - try: self.__parent__.remove(self) - except (ValueError): pass + try: + self.__parent__.remove(self) + except (ValueError): + pass del self def mark_as_read(self): @@ -69,18 +77,17 @@ def mark_as_read(self): self.delete() +@implementer(IMessageBox) class MessageBox(object): + """Adapter for all persistent objects. + + Provides a method, L{get_messages}, that retrieves L{Message} objects. """ - Adapter for all persistent objects. Provides a method, L{get_messages}, - that retrieves L{Message} objects. - """ - implements(IMessageBox) messagebox = None def get_unread(self, min_priority=INFO): - """ - Retrieve unread messages. + """Retrieve unread messages. @param min_priority: Optional minimum priority of messages to be returned; one of INFO, WARNING, CRITICAL @@ -89,12 +96,11 @@ def get_unread(self, min_priority=INFO): @rtype: list """ msgs = self.get_messages(min_priority) - msgs = filter(lambda x:not x._read, msgs) + msgs = filter(lambda x: not x._read, msgs) return msgs def get_messages(self, min_priority=INFO): - """ - Retrieve messages from the current users's session object. + """Retrieve messages from the current users's session object. @param min_priority: Optional minimum priority of messages to be returned; one of INFO, WARNING, CRITICAL @@ -102,69 +108,73 @@ def get_messages(self, min_priority=INFO): @return: A list of L{Message} objects. @rtype: list """ - msgs = sorted(self.messagebox, key=lambda x:x.timestamp) - msgs = filter(lambda x:x.priority>=min_priority, msgs) + msgs = sorted(self.messagebox, key=lambda x: x.timestamp) + msgs = filter(lambda x: x.priority >= min_priority, msgs) return msgs +@implementer(IBrowserMessages) class BrowserMessageBox(MessageBox): + """Adapter for all persistent objects. + + Provides a method, L{get_messages}, that retrieves L{Message} objects + from the current user's session. """ - Adapter for all persistent objects. Provides a method, L{get_messages}, - that retrieves L{Message} objects from the current user's session. - """ - implements(IBrowserMessages) + def __init__(self, context): - """ - Initialization method. + """Initialize a BrowserMessageBox instance. @param context: The object being adapted. Must have access to the - current request object via acquisition. + current request object via acquisition. 
@type context: Persistent """ self.context = context - self.messagebox = self.context.REQUEST.SESSION.get('messages', []) + self.messagebox = self.context.REQUEST.SESSION.get("messages", []) def get_unread(self, min_priority=INFO): - msgs = super(BrowserMessageBox, self).get_unread(min_priority=min_priority) + msgs = super(BrowserMessageBox, self).get_unread( + min_priority=min_priority + ) # force the session to persist if msgs: self.context.REQUEST.SESSION._p_changed = True return msgs + +@implementer(IUserMessages) class UserMessageBox(MessageBox): + """Adapter for all persistent objects. + + Provides a method, L{get_messages}, that retrieves L{Message} objects + from the current user's L{MessageQueue}. """ - Adapter for all persistent objects. Provides a method, L{get_messages}, - that retrieves L{Message} objects from the current user's L{MessageQueue}. - """ - implements(IUserMessages) + def __init__(self, context, user=None): - """ - Initialization method. + """Initialize a UserMessageBox instance. @param context: The object being adapted. Must have access to the dmd - via acquisition. + via acquisition. @type context: Persistent @param user: Optional username corresponding to the queue from which - messages will be retrieved. If left as C{None}, the - current user's queue will be used. + messages will be retrieved. If left as C{None}, the current + user's queue will be used. @type user: str """ self.context = context self.user = user - users = getToolByName(self.context, 'ZenUsers') + users = getToolByName(self.context, "ZenUsers") us = users.getUserSettings(self.user) self.messagebox = us.messages() +@implementer(IMessageSender) class MessageSender(object): """ Adapts persistent objects in order to provide message sending capability. """ - implements(IMessageSender) def __init__(self, context): - """ - Initialization method. + """Initialize a MessageSender instance. @param context: The object being adapted. Must have access to the dmd and the current request object via acquisition. @@ -172,9 +182,10 @@ def __init__(self, context): """ self.context = context - def sendToBrowser(self, title, body, priority=INFO, image=None, sticky=None): - """ - Create a message and store it on the session object. + def sendToBrowser( + self, title, body, priority=INFO, image=None, sticky=None + ): + """Create a message and store it on the session object. @param title: The message title @type title: str @@ -185,9 +196,9 @@ def sendToBrowser(self, title, body, priority=INFO, image=None, sticky=None): @param image: Optional URL of an image to be displayed in the message @type image: str """ - context = self.context.REQUEST.SESSION.get('messages') + context = self.context.REQUEST.SESSION.get("messages") if context is None: - self.context.REQUEST.SESSION['messages'] = context = [] + self.context.REQUEST.SESSION["messages"] = context = [] m = BrowserMessage(title, body, priority, image, sticky) m.__parent__ = context context.append(m) @@ -211,11 +222,12 @@ def sendToUser(self, title, body, priority=INFO, image=None, user=None): user's queue will be used. 
@type user: str """ - users = getToolByName(self.context, 'ZenUsers') + users = getToolByName(self.context, "ZenUsers") us = users.getUserSettings(user) - id = ZenRelationshipNameChooser(us.messages).chooseName('msg') + id = ZenRelationshipNameChooser(us.messages).chooseName("msg") # done in here to prevent recursive imports from ZenModelRM from PersistentMessage import PersistentMessage + m = PersistentMessage(id, title, body, priority, image) us.messages._setObject(m.id, m) @@ -233,18 +245,22 @@ def sendToAll(self, title, body, priority=INFO, image=None): @param image: Optional URL of an image to be displayed in the message @type image: str """ - users = getToolByName(self.context, 'ZenUsers') + users = getToolByName(self.context, "ZenUsers") for name in users.getAllUserSettingsNames(): self.sendToUser(title, body, priority, user=name, image=image) class ScriptMessageSender(MessageSender): + """Special message sender for use in scripts. + + Short-circuits sendToBrowser and sendToUser, since they don't really + apply. sendToAll should still work fine though. """ - Special message sender for use in scripts. Short-circuits sendToBrowser and - sendToUser, since they don't really apply. sendToAll should still work fine - though. - """ - def sendToBrowser(self, title, body, priority=INFO, image=None, sticky=None): + + def sendToBrowser( + self, title, body, priority=INFO, image=None, sticky=None + ): pass + def sendToUser(self, title, body, priority=INFO, image=None, user=None): pass diff --git a/Products/ZenWidgets/tests/__init__.py b/Products/ZenWidgets/tests/__init__.py index de5b4971fc..8f3a86088f 100644 --- a/Products/ZenWidgets/tests/__init__.py +++ b/Products/ZenWidgets/tests/__init__.py @@ -1,11 +1,8 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2007, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. -# +# ############################################################################## - - - diff --git a/Products/ZenWidgets/tests/test_Portlets.py b/Products/ZenWidgets/tests/test_Portlets.py index 611d19c82f..79095637e8 100644 --- a/Products/ZenWidgets/tests/test_Portlets.py +++ b/Products/ZenWidgets/tests/test_Portlets.py @@ -1,52 +1,59 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## import json +from Products import Zuul from Products.ZenTestCase.BaseTestCase import BaseTestCase from Products.ZenWidgets.browser.Portlets import ProductionStatePortletView -from Products import Zuul -class TestPortlets(BaseTestCase): +class TestPortlets(BaseTestCase): def afterSetUp(self): super(TestPortlets, self).afterSetUp() - self.facade = Zuul.getFacade('device', self.dmd) + self.facade = Zuul.getFacade("device", self.dmd) def test_ProductionStatePortletView(self): # Create some devices devices = self.dmd.Devices - test_device_maintenance = devices.createInstance('testDeviceMaintenance') - test_device_production = devices.createInstance('testDeviceProduction') + test_device_maintenance = devices.createInstance( + "testDeviceMaintenance" + ) + test_device_production = devices.createInstance("testDeviceProduction") test_device_maintenance.setProdState(300) test_device_production.setProdState(1000) psPortlet = ProductionStatePortletView(self.dmd, self.dmd.REQUEST) - + # filter by maintenance result = json.loads(psPortlet()) - self.assertEqual(len(result['data']), 1) - self.assertEqual(result['data'][0]['Device'], test_device_maintenance.getPrettyLink()) + self.assertEqual(len(result["data"]), 1) + self.assertEqual( + result["data"][0]["Device"], + test_device_maintenance.getPrettyLink(), + ) # filter by production result = json.loads(psPortlet("Production")) - self.assertEqual(len(result['data']), 1) - self.assertEqual(result['data'][0]['Device'], test_device_production.getPrettyLink()) + self.assertEqual(len(result["data"]), 1) + self.assertEqual( + result["data"][0]["Device"], test_device_production.getPrettyLink() + ) # filter by both result = json.loads(psPortlet(["Production", "Maintenance"])) - self.assertEqual(len(result['data']), 2) - + self.assertEqual(len(result["data"]), 2) def test_suite(): from unittest import TestSuite, makeSuite + suite = TestSuite() suite.addTest(makeSuite(TestPortlets)) return suite diff --git a/Products/ZenWidgets/tests/test_messaging.py b/Products/ZenWidgets/tests/test_messaging.py index 99a859d360..54617b1863 100644 --- a/Products/ZenWidgets/tests/test_messaging.py +++ b/Products/ZenWidgets/tests/test_messaging.py @@ -1,25 +1,27 @@ ############################################################################## -# +# # Copyright (C) Zenoss, Inc. 2009, all rights reserved. -# +# # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. 
-# +# ############################################################################## - from AccessControl.SecurityManagement import newSecurityManager from Products.ZenTestCase.BaseTestCase import BaseTestCase -from Products.ZenWidgets.messaging import MessageSender -from Products.ZenWidgets.messaging import BrowserMessageBox, UserMessageBox +from Products.ZenWidgets.messaging import ( + BrowserMessageBox, + MessageSender, + UserMessageBox, +) + class DummySession(dict): _p_changed = False class TestMessaging(BaseTestCase): - def afterSetUp(self): super(TestMessaging, self).afterSetUp() self.dmd.REQUEST.SESSION = DummySession() @@ -30,46 +32,45 @@ def _login(self, name): """ uf = self.dmd.zport.acl_users user = uf.getUserById(name) - if not hasattr(user, 'aq_base'): + if not hasattr(user, "aq_base"): user = user.__of__(uf) newSecurityManager(None, user) def test_sending_to_request(self): - MessageSender(self.dmd).sendToBrowser('title', 'This is a message') - us = self.dmd.ZenUsers.getUserSettings('tester') + MessageSender(self.dmd).sendToBrowser("title", "This is a message") + us = self.dmd.ZenUsers.getUserSettings("tester") self.assertEqual(len(us.messages()), 0) - self.assertEqual(len(self.dmd.REQUEST.SESSION['messages']), 1) - self.assertEqual(self.dmd.REQUEST.SESSION['messages'][0].body, - 'This is a message') + self.assertEqual(len(self.dmd.REQUEST.SESSION["messages"]), 1) + self.assertEqual( + self.dmd.REQUEST.SESSION["messages"][0].body, "This is a message" + ) def test_sending_to_user(self): - self._login('tester') - MessageSender(self.dmd).sendToUser('title', 'This is a message') - us = self.dmd.ZenUsers.getUserSettings('tester') + self._login("tester") + MessageSender(self.dmd).sendToUser("title", "This is a message") + us = self.dmd.ZenUsers.getUserSettings("tester") self.assertEqual(len(us.messages), 1) - self.assertEqual(us.messages()[0].body, 'This is a message') + self.assertEqual(us.messages()[0].body, "This is a message") def test_adapters(self): MessageSender(self.dmd).sendToBrowser( - 'title', - 'This is a browser message') - MessageSender(self.dmd).sendToUser( - 'title', - 'This is a user message') + "title", "This is a browser message" + ) + MessageSender(self.dmd).sendToUser("title", "This is a user message") brow = BrowserMessageBox(self.dmd) user = UserMessageBox(self.dmd) browmsgs = brow.get_messages() usermsgs = user.get_messages() self.assertEqual(len(browmsgs), 1) self.assertEqual(len(usermsgs), 1) - self.assertEqual(browmsgs[0].body, 'This is a browser message') - self.assertEqual(usermsgs[0].body, 'This is a user message') + self.assertEqual(browmsgs[0].body, "This is a browser message") + self.assertEqual(usermsgs[0].body, "This is a user message") def test_mark_as_read(self): - MessageSender(self.dmd).sendToBrowser('title', - 'This is a browser message') - MessageSender(self.dmd).sendToUser('title', - 'This is a user message') + MessageSender(self.dmd).sendToBrowser( + "title", "This is a browser message" + ) + MessageSender(self.dmd).sendToUser("title", "This is a user message") brow = BrowserMessageBox(self.dmd) user = UserMessageBox(self.dmd) @@ -85,11 +86,9 @@ def test_mark_as_read(self): self.assertEqual(len(user.get_unread()), 0) - - - def test_suite(): from unittest import TestSuite, makeSuite + suite = TestSuite() suite.addTest(makeSuite(TestMessaging)) return suite diff --git a/Products/Zuul/facades/devicefacade.py b/Products/Zuul/facades/devicefacade.py index 166efd6471..d63879b82f 100644 --- a/Products/Zuul/facades/devicefacade.py +++ 
b/Products/Zuul/facades/devicefacade.py @@ -7,55 +7,74 @@ # ############################################################################## - -import socket -import re -import os import logging -import subprocess +import re +import socket + from collections import OrderedDict from itertools import imap + +import six + +from AccessControl import getSecurityManager +from Acquisition import aq_base +from Products.AdvancedQuery import Eq, Or, Generic, And, MatchGlob from ZODB.transact import transact -from zope.interface import implements +from zope.component import getMultiAdapter from zope.event import notify -from zope.component import getMultiAdapter, queryUtility -from Products.AdvancedQuery import Eq, Or, Generic, And, MatchGlob -from Products.Zuul.catalog.interfaces import IComponentFieldSpec -from Products.Zuul.decorators import info -from Products.Zuul.utils import unbrain -from Products.Zuul.facades import TreeFacade -from Products.Zuul.catalog.component_catalog import get_component_field_spec, pad_numeric_values_for_indexing -from Products.Zuul.catalog.interfaces import IModelCatalogTool -from Products.Zuul.interfaces import IDeviceFacade, IInfo, ITemplateNode, IMetricServiceGraphDefinition +from zope.interface import implements + +from Products.DataCollector.Plugins import ( + CoreImporter, + loadPlugins, + PackImporter, +) from Products.Jobber.jobs import FacadeMethodJob -from Products.Zuul.tree import SearchResults -from Products.DataCollector.Plugins import CoreImporter, PackImporter, loadPlugins -from Products.ZenModel.DeviceOrganizer import DeviceOrganizer +from Products.ZenCollector.configcache.api import ConfigCache +from Products.ZenEvents.Event import Event +from Products.ZenMessaging.ChangeEvents.events import ( + ObjectAddedToOrganizerEvent, + ObjectRemovedFromOrganizerEvent, +) from Products.ZenModel.ComponentGroup import ComponentGroup -from Products.ZenModel.DeviceGroup import DeviceGroup -from Products.ZenModel.System import System -from Products.ZenModel.Location import Location from Products.ZenModel.DeviceClass import DeviceClass +from Products.ZenModel.DeviceGroup import DeviceGroup from Products.ZenModel.Device import Device -from Products.ZenMessaging.ChangeEvents.events import ObjectAddedToOrganizerEvent, \ - ObjectRemovedFromOrganizerEvent -from Products.Zuul import getFacade -from Products.Zuul.exceptions import DatapointNameConfict -from Products.Zuul.utils import ZuulMessageFactory as _t, UncataloguedObjectException -from Products.Zuul.interfaces import IDeviceCollectorChangeEvent -from Products.Zuul.catalog.events import IndexingEvent +from Products.ZenModel.Location import Location +from Products.ZenModel.System import System +from Products.ZenModel.ZenossSecurity import ZEN_VIEW from Products.ZenUtils.IpUtil import isip, getHostByName from Products.ZenUtils.Utils import getObjectsFromCatalog -from Products.ZenEvents.Event import Event -from Products.ZenUtils.Utils import binPath, zenPath -from Acquisition import aq_base -from Products.Zuul.infos.metricserver import MultiContextMetricServiceGraphDefinition -from AccessControl import getSecurityManager -from Products.ZenModel.ZenossSecurity import ZEN_VIEW +from Products.Zuul.catalog.component_catalog import ( + get_component_field_spec, + pad_numeric_values_for_indexing, +) +from Products.Zuul.catalog.events import IndexingEvent +from Products.Zuul.catalog.interfaces import IModelCatalogTool +from Products.Zuul.decorators import info +from Products.Zuul.exceptions import DatapointNameConfict +from 
Products.Zuul.facades import TreeFacade +from Products.Zuul import getFacade +from Products.Zuul.infos.metricserver import ( + MultiContextMetricServiceGraphDefinition, +) +from Products.Zuul.interfaces import IDeviceCollectorChangeEvent +from Products.Zuul.interfaces import ( + IDeviceFacade, + IInfo, + ITemplateNode, + IMetricServiceGraphDefinition, +) +from Products.Zuul.tree import SearchResults +from Products.Zuul.utils import unbrain +from Products.Zuul.utils import ( + UncataloguedObjectException, + ZuulMessageFactory as _t, +) iszprop = re.compile("z[A-Z]").match -log = logging.getLogger('zen.DeviceFacade') +log = logging.getLogger("zen.DeviceFacade") class DeviceCollectorChangeEvent(object): @@ -92,6 +111,7 @@ class DeviceFacade(TreeFacade): """ Facade for device stuff. """ + implements(IDeviceFacade) def _classFactory(self, contextUid): @@ -103,25 +123,34 @@ def _root(self): @property def _instanceClass(self): - return 'Products.ZenModel.Device.Device' + return "Products.ZenModel.Device.Device" def setInfo(self, uid, data): - """ - """ + """ """ super(DeviceFacade, self).setInfo(uid, data) obj = self._getObject(uid) if isinstance(obj, Device): obj.index_object() notify(IndexingEvent(obj)) - def findComponentIndex(self, componentUid, uid=None, meta_type=None, - sort='name', dir='ASC', name=None): - brains, total = self._typecatComponentBrains(uid=uid, meta_type=meta_type, sort=sort, dir=dir, name=name) + def findComponentIndex( + self, + componentUid, + uid=None, + meta_type=None, + sort="name", + dir="ASC", + name=None, + ): + brains, total = self._typecatComponentBrains( + uid=uid, meta_type=meta_type, sort=sort, dir=dir, name=name + ) if brains is None: - comps = self._componentSearch(uid=uid, meta_type=meta_type, sort=sort, - dir=dir, name=name) + comps = self._componentSearch( + uid=uid, meta_type=meta_type, sort=sort, dir=dir, name=name + ) for i, b in enumerate(comps): - if '/'.join(b._object.getPrimaryPath())==componentUid: + if "/".join(b._object.getPrimaryPath()) == componentUid: return i else: for i, b in enumerate(brains): @@ -130,8 +159,8 @@ def findComponentIndex(self, componentUid, uid=None, meta_type=None, def _filterComponents(self, comps, keys, query): """ - Returns a list of components where one of the attributes in keys contains - the query (case-insensitive). + Returns a list of components where one of the attributes in keys + contains the query (case-insensitive). 
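(Illustrative aside, not part of the diff: the component-search code being reformatted in these hunks is normally driven through the device facade. A minimal sketch follows, assuming a zendmd session where `dmd` is bound; the device uid, the `meta_type` value, the printed attribute, and the `results`/`total` attribute names on SearchResults are assumptions for illustration only.)

    from Products import Zuul

    facade = Zuul.getFacade("device", dmd)  # `dmd` as bound in a zendmd session
    results = facade.getComponents(
        uid="/zport/dmd/Devices/Server/Linux/devices/example-host",  # hypothetical uid
        meta_type="IpInterface",  # hypothetical component meta_type
        start=0,
        limit=50,
        sort="name",
        dir="ASC",
    )
    # SearchResults wraps an iterator of IInfo-adapted components plus a total
    # count; the attribute names below are assumed from common usage.
    print(results.total)
    for info in results.results:
        print(info.name)  # attribute shown for illustration only

(The call above exercises the same path as the typecat fast path and the componentSearch fallback reformatted in this file.)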
@type comps: SearchResults @param comps: All the Components for this query @@ -148,7 +177,7 @@ def _filterComponents(self, comps, keys, query): keep = False for key in keys: # non searchable fields - if key in ('uid', 'uuid', 'events', 'status', 'severity'): + if key in ("uid", "uuid", "events", "status", "severity"): continue val = getattr(comp, key, None) if not val: @@ -164,8 +193,18 @@ def _filterComponents(self, comps, keys, query): results.append(comp) return results - def _typecatComponentBrains(self, uid=None, types=(), meta_type=(), start=0, - limit=None, sort='name', dir='ASC', name=None, keys=()): + def _typecatComponentBrains( + self, + uid=None, + types=(), + meta_type=(), + start=0, + limit=None, + sort="name", + dir="ASC", + name=None, + keys=(), + ): obj = self._getObject(uid) spec = get_component_field_spec(meta_type) if spec is None: @@ -177,31 +216,41 @@ def _typecatComponentBrains(self, uid=None, types=(), meta_type=(), start=0, # Fall back to slow queries and sorting return None, 0 sortspec = ((sort, dir),) - querySet = [Generic('path', uid)] + querySet = [Generic("path", uid)] if name: - querySet.append(Or(*(MatchGlob(field, '*%s*' % name) for field in spec.fields))) + querySet.append( + Or(*(MatchGlob(field, "*%s*" % name) for field in spec.fields)) + ) brains = typecat.evalAdvancedQuery(And(*querySet), sortspec) total = len(brains) if limit is None: brains = brains[start:] else: - brains = brains[start:start+limit] + brains = brains[start : start + limit] return brains, total - def _typecatComponentPostProcess(self, brains, total, sort='name', reverse=False): + def _typecatComponentPostProcess( + self, brains, total, sort="name", reverse=False + ): hash_ = str(total) comps = map(IInfo, map(unbrain, brains)) # fetch any rrd data necessary self.bulkLoadMetricData(comps) # Do one big lookup of component events and add to the result objects - showSeverityIcon = self.context.dmd.UserInterfaceSettings.getInterfaceSettings().get('showEventSeverityIcons') + showSeverityIcon = ( + self.context.dmd.UserInterfaceSettings.getInterfaceSettings().get( + "showEventSeverityIcons" + ) + ) if showSeverityIcon: uuids = [r.uuid for r in comps] - zep = getFacade('zep') + zep = getFacade("zep") severities = zep.getWorstSeverity(uuids) for r in comps: r.setWorstEventSeverity(severities[r.uuid]) - sortedComps = sorted(comps, key=lambda x: getattr(x, sort), reverse=reverse) + sortedComps = sorted( + comps, key=lambda x: getattr(x, sort), reverse=reverse + ) return SearchResults(iter(sortedComps), total, hash_, False) # Get components from model catalog. 
Not used for now @@ -211,33 +260,51 @@ def _get_component_brains_from_model_catalog(self, uid, meta_type=()): query = {} if meta_type: query["meta_type"] = meta_type - query["objectImplements"] = "Products.ZenModel.DeviceComponent.DeviceComponent" + query["objectImplements"] = ( + "Products.ZenModel.DeviceComponent.DeviceComponent" + ) query["deviceId"] = uid model_query_results = model_catalog.search(query=query) - brains = [ brain for brain in model_query_results.results ] + brains = list(model_query_results.results) return brains - def _componentSearch(self, uid=None, types=(), meta_type=(), start=0, - limit=None, sort='name', dir='ASC', name=None, keys=()): - reverse = dir=='DESC' - if isinstance(meta_type, basestring) and get_component_field_spec(meta_type) is not None: - brains, total = self._typecatComponentBrains(uid, types, meta_type, start, - limit, sort, dir, name, keys) + def _componentSearch( + self, + uid=None, + types=(), + meta_type=(), + start=0, + limit=None, + sort="name", + dir="ASC", + name=None, + keys=(), + ): + reverse = dir == "DESC" + if ( + isinstance(meta_type, six.string_types) + and get_component_field_spec(meta_type) is not None + ): + brains, total = self._typecatComponentBrains( + uid, types, meta_type, start, limit, sort, dir, name, keys + ) if brains is not None: - return self._typecatComponentPostProcess(brains, total, sort, reverse) - if isinstance(meta_type, basestring): + return self._typecatComponentPostProcess( + brains, total, sort, reverse + ) + if isinstance(meta_type, six.string_types): meta_type = (meta_type,) - if isinstance(types, basestring): + if isinstance(types, six.string_types): types = (types,) querySet = [] if meta_type: - querySet.append(Or(*(Eq('meta_type', t) for t in meta_type))) - querySet.append(Generic('getAllPaths', uid)) + querySet.append(Or(*(Eq("meta_type", t) for t in meta_type))) + querySet.append(Generic("getAllPaths", uid)) query = And(*querySet) obj = self._getObject(uid) cat = obj.device().componentSearch - if 'getAllPaths' not in cat.indexes(): + if "getAllPaths" not in cat.indexes(): obj.device()._createComponentSearchPathIndex() brains = cat.evalAdvancedQuery(query) @@ -247,8 +314,12 @@ def _componentSearch(self, uid=None, types=(), meta_type=(), start=0, try: comps.append(IInfo(unbrain(brain))) except Exception: - log.warn('There is broken component "%s" in componentSearch catalog on %s device.', - brain.id, obj.device().id) + log.warn( + 'There is broken component "%s" in componentSearch ' + "catalog on %s device.", + brain.id, + obj.device().id, + ) # filter the components if name is not None: @@ -269,32 +340,55 @@ def componentSortKey(parent): return pad_numeric_values_for_indexing(val) # sort the components - sortedResults = list(sorted(comps, key=componentSortKey, reverse=reverse)) + sortedResults = sorted(comps, key=componentSortKey, reverse=reverse) # limit the search results to the specified range if limit is None: pagedResult = sortedResults[start:] else: - pagedResult = sortedResults[start:start + limit] + pagedResult = sortedResults[start : start + limit] # fetch any rrd data necessary self.bulkLoadMetricData(pagedResult) # Do one big lookup of component events and add to the result objects - showSeverityIcon = self.context.dmd.UserInterfaceSettings.getInterfaceSettings().get('showEventSeverityIcons') + showSeverityIcon = ( + self.context.dmd.UserInterfaceSettings.getInterfaceSettings().get( + "showEventSeverityIcons" + ) + ) if showSeverityIcon: uuids = [r.uuid for r in pagedResult] - zep = 
getFacade('zep') + zep = getFacade("zep") severities = zep.getWorstSeverity(uuids) for r in pagedResult: r.setWorstEventSeverity(severities[r.uuid]) return SearchResults(iter(pagedResult), total, hash_, False) - def getComponents(self, uid=None, types=(), meta_type=(), start=0, - limit=None, sort='name', dir='ASC', name=None, keys=()): - return self._componentSearch(uid, types, meta_type, start, limit, - sort, dir, name=name, keys=keys) + def getComponents( + self, + uid=None, + types=(), + meta_type=(), + start=0, + limit=None, + sort="name", + dir="ASC", + name=None, + keys=(), + ): + return self._componentSearch( + uid, + types, + meta_type, + start, + limit, + sort, + dir, + name=name, + keys=keys, + ) def bulkLoadMetricData(self, infos): """ @@ -305,19 +399,21 @@ def bulkLoadMetricData(self, infos): if len(infos) == 0: return datapoints = set() - indexedInfos = dict() - for info in infos: - indexedInfos[info._object.getResourceKey()] = info - if hasattr(info, "dataPointsToFetch"): - [datapoints.add(dp) for dp in info.dataPointsToFetch] + indexedInfos = {} + for inf in infos: + indexedInfos[inf._object.getResourceKey()] = inf + if hasattr(inf, "dataPointsToFetch"): + [datapoints.add(dp) for dp in inf.dataPointsToFetch] # in case no metrics were asked for if len(datapoints) == 0: return # get the metric facade - mfacade = getFacade('metric', self._dmd) + mfacade = getFacade("metric", self._dmd) # metric facade expects zenmodel objects or uids - results = mfacade.getMultiValues([i._object for i in infos], datapoints, returnSet="LAST") + results = mfacade.getMultiValues( + [i._object for i in infos], datapoints, returnSet="LAST" + ) # assign the metrics to the info objects for resourceKey, record in results.iteritems(): @@ -332,14 +428,21 @@ def _get_component_types_from_model_catalog(self, uid): componentTypes = {} uuidMap = {} model_catalog = IModelCatalogTool(self.context.dmd) - model_query = Eq('objectImplements', "Products.ZenModel.DeviceComponent.DeviceComponent") + model_query = Eq( + "objectImplements", + "Products.ZenModel.DeviceComponent.DeviceComponent", + ) model_query = And(model_query, Eq("deviceId", uid)) - model_query_results = model_catalog.search(query=model_query, fields=["uuid", "meta_type"]) + model_query_results = model_catalog.search( + query=model_query, fields=["uuid", "meta_type"] + ) for brain in model_query_results.results: uuidMap[brain.uuid] = brain.meta_type - compType = componentTypes.setdefault(brain.meta_type, { 'count' : 0, 'severity' : 0 }) - compType['count'] += 1 + compType = componentTypes.setdefault( + brain.meta_type, {"count": 0, "severity": 0} + ) + compType["count"] += 1 return (componentTypes, uuidMap) def _get_component_types_from_zcatalog(self, uid): @@ -349,8 +452,10 @@ def _get_component_types_from_zcatalog(self, uid): dev = self._getObject(uid) for brain in dev.componentSearch(): uuidMap[brain.getUUID] = brain.meta_type - compType = componentTypes.setdefault(brain.meta_type, { 'count' : 0, 'severity' : 0 }) - compType['count'] += 1 + compType = componentTypes.setdefault( + brain.meta_type, {"count": 0, "severity": 0} + ) + compType["count"] += 1 return (componentTypes, uuidMap) def getComponentTree(self, uid): @@ -362,40 +467,53 @@ def getComponentTree(self, uid): if not uuidMap: return [] - zep = getFacade('zep') - showSeverityIcon = self.context.dmd.UserInterfaceSettings.getInterfaceSettings().get('showEventSeverityIcons') + zep = getFacade("zep") + showSeverityIcon = ( + self.context.dmd.UserInterfaceSettings.getInterfaceSettings().get( 
+ "showEventSeverityIcons" + ) + ) if showSeverityIcon: severities = zep.getWorstSeverity(uuidMap.keys()) for uuid, sev in severities.iteritems(): compType = componentTypes[uuidMap[uuid]] - compType['severity'] = max(compType['severity'], sev) + compType["severity"] = max(compType["severity"], sev) result = [] for name, compType in componentTypes.iteritems(): - result.append({ - 'type' : name, - 'count' : compType['count'], - 'severity' : EventManagerBase.severities.get(compType['severity'], 0).lower() - }) + result.append( + { + "type": name, + "count": compType["count"], + "severity": EventManagerBase.severities.get( + compType["severity"], 0 + ).lower(), + } + ) return result def getDeviceUids(self, uid): cat = IModelCatalogTool(self._getObject(uid)) - return [b.getPath() for b in cat.search('Products.ZenModel.Device.Device')] + return [ + b.getPath() for b in cat.search("Products.ZenModel.Device.Device") + ] def deleteComponents(self, uids): comps = imap(self._getObject, uids) for comp in comps: if comp.isLockedFromDeletion(): - raise Exception("Component %s is locked from deletion" % comp.id) + raise Exception( + "Component %s is locked from deletion" % comp.id + ) - if hasattr(comp, 'manage_deleteComponent'): + if hasattr(comp, "manage_deleteComponent"): comp.manage_deleteComponent() else: - raise Exception("%s %s cannot be manually deleted" % - (getattr(comp,'meta_type','component'),comp.id)) - + raise Exception( + "%s %s cannot be manually deleted" + % (getattr(comp, "meta_type", "component"), comp.id) + ) def _deleteDevices(self, uids, deleteEvents=False, deletePerf=True): @transact @@ -405,26 +523,29 @@ def dbDeleteDevices(uids): for dev in devs: devid = dev.getId() deletedIds.append(devid) - parent = dev.getPrimaryParent() - dev.deleteDevice(deleteStatus=deleteEvents, - deletePerf=deletePerf) + dev.deleteDevice( + deleteStatus=deleteEvents, deletePerf=deletePerf + ) return deletedIds def uidChunks(uids, chunksize=10): i = 0 maxi = len(uids) while i < maxi: - nexti = i+chunksize + nexti = i + chunksize yield uids[i:nexti] i = nexti deletedIds = sum(map(dbDeleteDevices, uidChunks(uids)), []) for devid in deletedIds: - self._dmd.ZenEventManager.sendEvent(Event( - summary='Deleted device: '+devid, - severity=2, #info - eventClass='/Change/Remove', #zEventAction=history - device=devid)) + self._dmd.ZenEventManager.sendEvent( + Event( + summary="Deleted device: " + devid, + severity=2, # info + eventClass="/Change/Remove", # zEventAction=history + device=devid, + ) + ) def deleteDevices(self, uids, deleteEvents=False, deletePerf=True): """ @@ -441,15 +562,16 @@ def deleteDevices(self, uids, deleteEvents=False, deletePerf=True): @info def removeDevices(self, uids, organizer): # Resolve target if a path - if isinstance(organizer, basestring): + if isinstance(organizer, six.string_types): organizer = self._getObject(organizer) - assert isinstance(organizer, DeviceOrganizer) devs = map(self._getObject, uids) removed = [] if isinstance(organizer, DeviceGroup): for dev in devs: oldGroupNames = dev.getDeviceGroupNames() - newGroupNames = self._removeOrganizer(organizer, list(oldGroupNames)) + newGroupNames = self._removeOrganizer( + organizer, list(oldGroupNames) + ) if oldGroupNames != newGroupNames: dev.setGroups(newGroupNames) notify(ObjectRemovedFromOrganizerEvent(dev, organizer)) @@ -458,7 +580,9 @@ def removeDevices(self, uids, organizer): elif isinstance(organizer, System): for dev in devs: oldSystemNames = dev.getSystemNames() - newSystemNames = self._removeOrganizer(organizer, 
list(oldSystemNames)) + newSystemNames = self._removeOrganizer( + organizer, list(oldSystemNames) + ) if newSystemNames != oldSystemNames: dev.setSystems(newSystemNames) notify(ObjectRemovedFromOrganizerEvent(dev, organizer)) @@ -483,30 +607,42 @@ def getUserCommands(self, uid=None): org = self._getObject(uid) return org.getUserCommands() - def setProductInfo(self, uid, hwManufacturer=None, hwProductName=None, - osManufacturer=None, osProductName=None): + def setProductInfo( + self, + uid, + hwManufacturer=None, + hwProductName=None, + osManufacturer=None, + osProductName=None, + ): dev = self._getObject(uid) - dev.setProductInfo(hwManufacturer=hwManufacturer, - hwProductName=hwProductName, - osManufacturer=osManufacturer, - osProductName=osProductName) + dev.setProductInfo( + hwManufacturer=hwManufacturer, + hwProductName=hwProductName, + osManufacturer=osManufacturer, + osProductName=osProductName, + ) def setProductionState(self, uids, state, asynchronous=False): if asynchronous: self._dmd.JobManager.addJob( FacadeMethodJob, - description="Set state %s for %s" % (state, ','.join(uids)), - kwargs=dict( - facadefqdn="Products.Zuul.facades.devicefacade.DeviceFacade", - method="_setProductionState", - uids=uids, - state=state - )) + description="Set state %s for %s" % (state, ",".join(uids)), + kwargs={ + "facadefqdn": ( + "Products.Zuul.facades.devicefacade.DeviceFacade" + ), + "method": "_setProductionState", + "uids": uids, + "state": state, + }, + ) else: self._setProductionState(uids, state) - def setLockState(self, uids, deletion=False, updates=False, - sendEvent=False): + def setLockState( + self, uids, deletion=False, updates=False, sendEvent=False + ): devs = imap(self._getObject, uids) for dev in devs: if deletion or updates: @@ -522,15 +658,18 @@ def setMonitor(self, uids, monitor=False): for comp in comps: IInfo(comp).monitor = monitor # update the componentSearch catalog - comp.index_object(idxs=('monitored',)) + comp.index_object(idxs=("monitored",)) # update the global catalog as well - notify(IndexingEvent(comp, idxs=('monitored',))) + notify(IndexingEvent(comp, idxs=("monitored",))) def pushChanges(self, uids): devs = imap(self._getObject, uids) + if not devs: + return + configcache = ConfigCache.new() for dev in devs: - dev.pushConfig() + configcache.update_device(dev) def modelDevices(self, uids): devs = imap(self._getObject, uids) @@ -553,9 +692,8 @@ def resumeCollection(self, uid): def _moveDevices(self, uids, target): # Resolve target if a path - if isinstance(target, basestring): + if isinstance(target, six.string_types): target = self._getObject(target) - assert isinstance(target, DeviceOrganizer) devs = (self._getObject(uid) for uid in uids) targetname = target.getOrganizerName() moved_devices_count = 0 @@ -578,24 +716,28 @@ def _moveDevices(self, uids, target): elif isinstance(target, Location): for dev in devs: if dev.location(): - notify(ObjectRemovedFromOrganizerEvent(dev, dev.location())) + notify( + ObjectRemovedFromOrganizerEvent(dev, dev.location()) + ) dev.setLocation(targetname) notify(ObjectAddedToOrganizerEvent(dev, target)) success = True elif isinstance(target, DeviceClass): - moved_devices_count = self._dmd.Devices.moveDevices(targetname,[dev.id for dev in devs]) + moved_devices_count = self._dmd.Devices.moveDevices( + targetname, [dev.id for dev in devs] + ) success = True remodel_required = True result = { - 'success': success, - 'message': 'The %s devices have been moved' % moved_devices_count, - 'remodel_required': remodel_required + "success": 
success, + "message": "The %s devices have been moved" % moved_devices_count, + "remodel_required": remodel_required, } return result def _setProductionState(self, uids, state): - if isinstance(uids, basestring): + if isinstance(uids, six.string_types): uids = (uids,) for uid in uids: dev = self._getObject(uid) @@ -604,9 +746,8 @@ def _setProductionState(self, uids, state): def doesMoveRequireRemodel(self, uid, target): # Resolve target if a path - if isinstance(target, basestring): + if isinstance(target, six.string_types): target = self._getObject(target) - assert isinstance(target, DeviceClass) targetClass = target.getPythonDeviceClass() dev = self._getObject(uid) return dev and dev.__class__ != targetClass @@ -614,20 +755,29 @@ def doesMoveRequireRemodel(self, uid, target): @info def moveDevices(self, uids, target, asynchronous=True): if asynchronous: - devdesc = ("device %s" % uids[0].split('/')[-1] if len(uids)==1 - else "%s devices" % len(uids)) + devdesc = ( + "device %s" % uids[0].split("/")[-1] + if len(uids) == 1 + else "%s devices" % len(uids) + ) return self._dmd.JobManager.addJob( - FacadeMethodJob, description="Move %s to %s" % (devdesc, target), - kwargs=dict( - facadefqdn="Products.Zuul.facades.devicefacade.DeviceFacade", - method="_moveDevices", - uids=uids, - target=target - )) + FacadeMethodJob, + description="Move %s to %s" % (devdesc, target), + kwargs={ + "facadefqdn": ( + "Products.Zuul.facades.devicefacade.DeviceFacade" + ), + "method": "_moveDevices", + "uids": uids, + "target": target, + }, + ) else: return self._moveDevices(uids, target) - def getDeviceByIpAddress(self, deviceName, collector="localhost", ipAddress=""): + def getDeviceByIpAddress( + self, deviceName, collector="localhost", ipAddress="" + ): # convert device name to an ip address if not ipAddress: if isip(deviceName): @@ -641,8 +791,10 @@ def getDeviceByIpAddress(self, deviceName, collector="localhost", ipAddress=""): # find a device with the same ip on the same collector cat = IModelCatalogTool(self.context.Devices) - query = And(Eq('text_ipAddress', ipAddress), - Eq('objectImplements', 'Products.ZenModel.Device.Device')) + query = And( + Eq("text_ipAddress", ipAddress), + Eq("objectImplements", "Products.ZenModel.Device.Device"), + ) search_results = cat.search(query=query) for brain in search_results.results: @@ -656,82 +808,123 @@ def getDeviceByName(self, deviceName): def setCollector(self, uids, collector, moveData=False, asynchronous=True): # Keep 'moveData' in signature even though it's unused now if asynchronous: - prettyUids = ", ".join([uid.split('/')[-1] for uid in uids]) + prettyUids = ", ".join([uid.split("/")[-1] for uid in uids]) return self._dmd.JobManager.addJob( - FacadeMethodJob, description="Move devices %s to collector %s" % (prettyUids, collector), - kwargs=dict( - facadefqdn="Products.Zuul.facades.devicefacade.DeviceFacade", - method="_setCollector", - uids=uids, - collector=collector - )) + FacadeMethodJob, + description="Move devices %s to collector %s" + % (prettyUids, collector), + kwargs={ + "facadefqdn": ( + "Products.Zuul.facades.devicefacade.DeviceFacade" + ), + "method": "_setCollector", + "uids": uids, + "collector": collector, + }, + ) else: return self._setCollector(uids, collector) - def _setCollector(self, uids, collector, moveData=False, asynchronous=True): + def _setCollector( + self, uids, collector, moveData=False, asynchronous=True + ): movedDevices = [] for uid in uids: info = self.getInfo(uid) - movedDevices.append({ - 'id': uid.split("/")[-1], - 
'fromCollector': info.collector, - }) + movedDevices.append( + { + "id": uid.split("/")[-1], + "fromCollector": info.collector, + } + ) info.collector = collector - # If an event is desired at this point, use a DeviceCollectorChangeEvent here + # If an event is desired at this point, + # use a DeviceCollectorChangeEvent here @info - def addDevice(self, deviceName, deviceClass, title=None, snmpCommunity="", - snmpPort=161, manageIp="", model=False, collector='localhost', - rackSlot=0, productionState=1000, comments="", - hwManufacturer="", hwProductName="", osManufacturer="", - osProductName="", priority = 3, tag="", serialNumber="", - locationPath="", zCommandUsername="", zCommandPassword="", - zWinUser="", zWinPassword="", systemPaths=[], groupPaths=[], - zProperties={}, cProperties={}, - ): - zProps = dict(zSnmpCommunity=snmpCommunity, - zSnmpPort=snmpPort, - zCommandUsername=zCommandUsername, - zCommandPassword=zCommandPassword, - zWinUser=zWinUser, - zWinPassword=zWinPassword - ) + def addDevice( + self, + deviceName, + deviceClass, + title=None, + snmpCommunity="", + snmpPort=161, + manageIp="", + model=False, + collector="localhost", + rackSlot=0, + productionState=1000, + comments="", + hwManufacturer="", + hwProductName="", + osManufacturer="", + osProductName="", + priority=3, + tag="", + serialNumber="", + locationPath="", + zCommandUsername="", + zCommandPassword="", + zWinUser="", + zWinPassword="", + systemPaths=None, + groupPaths=None, + zProperties=None, + cProperties=None, + ): + systemPaths = systemPaths if systemPaths else [] + groupPaths = groupPaths if groupPaths else [] + zProperties = zProperties if zProperties else {} + cProperties = cProperties if cProperties else {} + zProps = { + "zSnmpCommunity": snmpCommunity, + "zSnmpPort": snmpPort, + "zCommandUsername": zCommandUsername, + "zCommandPassword": zCommandPassword, + "zWinUser": zWinUser, + "zWinPassword": zWinPassword, + } zProps.update(zProperties) model = model and "Auto" or "none" perfConf = self._dmd.Monitors.getPerformanceMonitor(collector) if perfConf.viewName() != collector: raise Exception("Collector `{}` does not exist".format(collector)) - jobrecords = perfConf.addCreateDeviceJob(deviceName=deviceName, - devicePath=deviceClass, - performanceMonitor=collector, - discoverProto=model, - manageIp=manageIp, - zProperties=zProps, - cProperties=cProperties, - rackSlot=rackSlot, - productionState=productionState, - comments=comments, - hwManufacturer=hwManufacturer, - hwProductName=hwProductName, - osManufacturer=osManufacturer, - osProductName=osProductName, - priority=priority, - tag=tag, - serialNumber=serialNumber, - locationPath=locationPath, - systemPaths=systemPaths, - groupPaths=groupPaths, - title=title) + jobrecords = perfConf.addCreateDeviceJob( + deviceName=deviceName, + devicePath=deviceClass, + performanceMonitor=collector, + discoverProto=model, + manageIp=manageIp, + zProperties=zProps, + cProperties=cProperties, + rackSlot=rackSlot, + productionState=productionState, + comments=comments, + hwManufacturer=hwManufacturer, + hwProductName=hwProductName, + osManufacturer=osManufacturer, + osProductName=osProductName, + priority=priority, + tag=tag, + serialNumber=serialNumber, + locationPath=locationPath, + systemPaths=systemPaths, + groupPaths=groupPaths, + title=title, + ) return jobrecords - def remodel(self, deviceUid, collectPlugins='', background=True): - #fake_request will break not a background command - fake_request = {'CONTENT_TYPE': 'xml'} if background else None + def remodel(self, 
deviceUid, collectPlugins="", background=True): + # fake_request will break not a background command + fake_request = {"CONTENT_TYPE": "xml"} if background else None device = self._getObject(deviceUid) return device.getPerformanceServer().collectDevice( - device, background=background, collectPlugins=collectPlugins, - REQUEST=fake_request) + device, + background=background, + collectPlugins=collectPlugins, + REQUEST=fake_request, + ) def addLocalTemplate(self, deviceUid, templateId): """ @@ -745,8 +938,10 @@ def addLocalTemplate(self, deviceUid, templateId): def removeLocalTemplate(self, deviceUid, templateUid): """ Removes a local definition of a template on a device - @param string deviceUid: Absolute path to the device that has the template - @param string templateUid: Absolute path to the template we wish to remove + @param deviceUid: Absolute path to the device that has the template + @type deviceUid: str + @param templateUid: Absolute path to the template we wish to remove + @type templateUid: str """ device = self._getObject(deviceUid) template = self._getObject(templateUid) @@ -754,39 +949,101 @@ def removeLocalTemplate(self, deviceUid, templateUid): def getTemplates(self, id): object = self._getObject(id) - - if isinstance(object, Device): - rrdTemplates = object.getAvailableTemplates() - else: - rrdTemplates = object.getRRDTemplates() - # used to sort the templates - def byTitleOrId(left, right): - return cmp(left.titleOrId().lower(), right.titleOrId().lower()) + isDeviceClass = isinstance(object, DeviceClass) + if isDeviceClass: + pythonDeviceClass = object.getPythonDeviceClass() + + zDeviceTemplates = object.zDeviceTemplates + + rrdTemplates = object.getRRDTemplates() - for rrdTemplate in sorted(rrdTemplates, byTitleOrId): - uid = '/'.join(rrdTemplate.getPrimaryPath()) - # only show Bound Templates + templateNames = [] + boundTemplates = [] + unboundTemplates = [] + for rrdTemplate in rrdTemplates: + if isDeviceClass and not issubclass( + pythonDeviceClass, rrdTemplate.getTargetPythonClass() + ): + continue + templateNames.append(rrdTemplate.id) if rrdTemplate.id in object.zDeviceTemplates: - path = rrdTemplate.getUIPath() - - # if defined directly on the device do not show the path - if isinstance(object, Device) and object.titleOrId() in path: - path = _t('Locally Defined') - yield {'id': uid, - 'uid': uid, - 'path': path, - 'text': '%s (%s)' % (rrdTemplate.titleOrId(), path), - 'leaf': True - } + boundTemplates.append(rrdTemplate) + else: + unboundTemplates.append(rrdTemplate) + + # used to sort the templates + def byTitleOrId(obj): + return obj.titleOrId().lower() + + for rrdTemplate in list(unboundTemplates): + if rrdTemplate.id.endswith( + "-replacement" + ) or rrdTemplate.id.endswith("-addition"): + if ( + "-".join(rrdTemplate.id.split("-")[:-1]) + in zDeviceTemplates + ): + boundTemplates.append(rrdTemplate) + unboundTemplates.remove(rrdTemplate) + + def makenode(rrdTemplate, suborg=None): + uid = "/".join(rrdTemplate.getPrimaryPath()) + path = "" + + # for DeviceClasses show which are bound + if isinstance(object, DeviceClass): + if rrdTemplate.id in zDeviceTemplates: + path = "%s (%s)" % (path, _t("Bound")) + if rrdTemplate.id + "-replacement" in templateNames: + path = "%s (%s)" % (path, _t("Replaced")) + + # if defined directly on the device do not show the path + uiPath = rrdTemplate.getUIPath() + if (not isDeviceClass) and object.titleOrId() in uiPath: + path = "%s (%s)" % (path, _t("Locally Defined")) + else: + path = "%s (%s)" % (path, uiPath) + return { + "id": 
uid, + "uid": uid, + "path": path, + "text": "%s %s" % (rrdTemplate.titleOrId(), path), + "leaf": True, + } + + for rrdTemplate in sorted(boundTemplates, key=byTitleOrId): + yield makenode(rrdTemplate) + + if isDeviceClass: + available = [] + for rrdTemplate in sorted(unboundTemplates, key=byTitleOrId): + available.append(makenode(rrdTemplate, "Available")) + yield { + "id": "Available", + "text": "Available", + "leaf": False, + "children": available, + } def getLocalTemplates(self, uid): """ - Returns a dictionary of every template defined on the device specified by the uid + Returns a dictionary of every template defined on the device + specified by the uid. + @param string uid: absolute path of a device @returns [Dict] All the templates defined on this device """ - return [template for template in self.getTemplates(uid) if template['path'] == _t('Locally Defined')] + for template in self._getObject(uid).objectValues("RRDTemplate"): + uid = "/".join(template.getPrimaryPath()) + path = template.getUIPath() + yield { + "id": uid, + "uid": uid, + "path": path, + "text": "%s (%s)" % (template.titleOrId(), path), + "leaf": True, + } def getUnboundTemplates(self, uid): return self._getBoundTemplates(uid, False) @@ -819,8 +1076,14 @@ def setBoundTemplates(self, uid, templateIds): if intersection: dp_name = intersection.pop() other_id = bound_dp_names[dp_name] - fmt = "both {template.id} and {other_id} have a datapoint named {dp_name}" - raise DatapointNameConfict(fmt.format(template=template, other_id=other_id, dp_name=dp_name)) + raise DatapointNameConfict( + "both {template.id} and {other_id} have a " + "datapoint named {dp_name}".format( + template=template, + other_id=other_id, + dp_name=dp_name, + ) + ) for dp_name in dp_names: bound_dp_names[dp_name] = template.id @@ -829,13 +1092,14 @@ def setBoundTemplates(self, uid, templateIds): def resetBoundTemplates(self, uid): obj = self._getObject(uid) # make sure we have bound templates before we remove them - if obj.hasProperty('zDeviceTemplates'): + if obj.hasProperty("zDeviceTemplates"): obj.removeZDeviceTemplates() def getOverridableTemplates(self, uid): """ - A template is overrideable at the device if it is bound to the device and - we have not already overridden it. + A template is overrideable at the device if it is bound to the + device and we have not already overridden it. 
+ @param string uid: the unique id of a device @returns a list of all available templates for the given uid """ @@ -843,19 +1107,25 @@ def getOverridableTemplates(self, uid): templates = obj.getRRDTemplates() for template in templates: # see if the template is already overridden here - if not obj.id in template.getPhysicalPath(): + if obj.id not in template.getPhysicalPath(): try: yield ITemplateNode(template) except UncataloguedObjectException: pass - def addLocationOrganizer(self, contextUid, id, description = '', address=''): - org = super(DeviceFacade, self).addOrganizer(contextUid, id, description) + def addLocationOrganizer(self, contextUid, id, description="", address=""): + org = super(DeviceFacade, self).addOrganizer( + contextUid, id, description + ) org.address = address return org - def addDeviceClass(self, contextUid, id, description = '', connectionInfo=None): - org = super(DeviceFacade, self).addOrganizer(contextUid, id, description) + def addDeviceClass( + self, contextUid, id, description="", connectionInfo=None + ): + org = super(DeviceFacade, self).addOrganizer( + contextUid, id, description + ) if connectionInfo: org.connectionInfo = connectionInfo return org @@ -872,16 +1142,22 @@ def getModelerPluginDocStrings(self, uid): coreImporter = CoreImporter() for plugin in plugins: try: - module = coreImporter.importModule(plugin.package, plugin.modPath) + module = coreImporter.importModule( + plugin.package, plugin.modPath + ) except ImportError: try: - module = packImporter.importModule(plugin.package, plugin.modPath) + module = packImporter.importModule( + plugin.package, plugin.modPath + ) except ImportError: # unable to import skip over this one continue pluginDocs = module.__doc__ if pluginDocs: - pluginDocs = '
' + pluginDocs.replace('\n', '\n
') + '
' + pluginDocs = ( + "
" + pluginDocs.replace("\n", "\n
") + "
" + ) docs[plugin.pluginName] = pluginDocs return docs @@ -891,7 +1167,9 @@ def getConnectionInfo(self, uid): deviceClass = obj if not isinstance(obj, DeviceClass): deviceClass = obj.deviceClass() - for prop in deviceClass.primaryAq().getZ('zCredentialsZProperties', []): + for prop in deviceClass.primaryAq().getZ( + "zCredentialsZProperties", [] + ): result.append(obj.exportZProperty(prop)) return result @@ -903,18 +1181,36 @@ def getGraphDefs(self, uid, drange): # definition. if hasattr(obj, "getGraphObjects"): for graph, ctx in obj.getGraphObjects(): - info = getMultiAdapter((graph, ctx), IMetricServiceGraphDefinition) + info = getMultiAdapter( + (graph, ctx), IMetricServiceGraphDefinition + ) # if there is a separate context display that as the title if ctx != obj: info._showContextTitle = True graphs.append(info) return graphs - def addIpRouteEntry(self, uid, dest, routemask, nexthopid, interface, - routeproto, routetype, userCreated): + def addIpRouteEntry( + self, + uid, + dest, + routemask, + nexthopid, + interface, + routeproto, + routetype, + userCreated, + ): device = self._getObject(uid) - device.os.addIpRouteEntry(dest, routemask, nexthopid, interface, - routeproto, routetype, userCreated) + device.os.addIpRouteEntry( + dest, + routemask, + nexthopid, + interface, + routeproto, + routetype, + userCreated, + ) def addIpInterface(self, uid, newId, userCreated): device = self._getObject(uid) @@ -941,41 +1237,47 @@ def getSoftware(self, uid): softwares = (IInfo(s) for s in obj.os.software.objectValuesGen()) return softwares - def getOverriddenObjectsList(self, uid, propname, relName='devices'): + def getOverriddenObjectsList(self, uid, propname, relName="devices"): obj = self._getObject(uid) objects = [] for inst in obj.getSubInstances(relName): if inst.isLocal(propname) and inst not in objects: proptype = inst.getPropertyType(propname) - objects.append({ - 'devicelink':inst.getPrimaryDmdId(), - 'props':self.maskPropertyPassword(inst, propname), - 'proptype':proptype - }) - if relName == 'devices': - objects[-1].update({ - 'objtype':relName, - 'name':inst.titleOrId(), - 'devicelink':inst.getPrimaryUrlPath() - }) + objects.append( + { + "devicelink": inst.getPrimaryDmdId(), + "props": self.maskPropertyPassword(inst, propname), + "proptype": proptype, + } + ) + if relName == "devices": + objects[-1].update( + { + "objtype": relName, + "name": inst.titleOrId(), + "devicelink": inst.getPrimaryUrlPath(), + } + ) for inst in obj.getOverriddenObjects(propname): proptype = inst.getPropertyType(propname) - objects.append({ - 'devicelink':inst.getPrimaryDmdId(), - 'props':self.maskPropertyPassword(inst, propname), - 'proptype':proptype - }) + objects.append( + { + "devicelink": inst.getPrimaryDmdId(), + "props": self.maskPropertyPassword(inst, propname), + "proptype": proptype, + } + ) return objects - def getOverriddenObjectsParent(self, uid, propname=''): + def getOverriddenObjectsParent(self, uid, propname=""): obj = self._getObject(uid) - if propname == '': - prop = '' - proptype = '' + if propname == "": + prop = "" + proptype = "" else: proptype = obj.getPropertyType(propname) prop = self.maskPropertyPassword(obj, propname) - return [{'devicelink':uid, 'props':prop, 'proptype':proptype}] + return [{"devicelink": uid, "props": prop, "proptype": proptype}] def getOverriddenZprops(self, uid, all=True, pfilt=iszprop): """ @@ -986,7 +1288,8 @@ def getOverriddenZprops(self, uid, all=True, pfilt=iszprop): if all: rootnode = obj.getZenRootNode() else: - if obj.id == obj.dmdRootName: return [] 
+ if obj.id == obj.dmdRootName: + return [] rootnode = aq_base(obj) return sorted(prop for prop in rootnode.propertyIds() if pfilt(prop)) @@ -995,47 +1298,66 @@ def clearGeocodeCache(self): This clears the geocode cache by reseting the latlong property of all locations. """ - results = IModelCatalogTool(self._dmd.Locations).search('Products.ZenModel.Location.Location') + results = IModelCatalogTool(self._dmd.Locations).search( + "Products.ZenModel.Location.Location" + ) for brain in results: try: brain.getObject().latlong = None except Exception: - log.warn("Unable to clear the geocodecache from %s", brain.getPath()) + log.warn( + "Unable to clear the geocodecache from %s", brain.getPath() + ) @info def getGraphDefinitionsForComponent(self, uid): - graphDefs = dict() + graphDefs = {} obj = self._getObject(uid) if isinstance(obj, ComponentGroup): components = obj.getComponents() else: - components = list(getObjectsFromCatalog(obj.componentSearch, None, log)) + components = list( + getObjectsFromCatalog(obj.componentSearch, None, log) + ) for component in components: - current_def = [graphDef.id for graphDef, _ in component.getGraphObjects()] + current_def = [ + graphDef.id for graphDef, _ in component.getGraphObjects() + ] if component.meta_type in graphDefs: prev_def = graphDefs[component.meta_type] - graphDefs[component.meta_type] = prev_def + list(set(current_def) - set(prev_def)) + graphDefs[component.meta_type] = prev_def + list( + set(current_def) - set(prev_def) + ) else: graphDefs[component.meta_type] = current_def return graphDefs - def getComponentGraphs(self, uid, meta_type, graphId, allOnSame=False): + def getComponentGraphs( + self, uid, meta_type, graphId, limit, graphsOnSame, allOnSame=False + ): obj = self._getObject(uid) # get the components we are rendering graphs for query = {} - query['meta_type'] = meta_type + query["meta_type"] = meta_type if isinstance(obj, ComponentGroup): - components = [comp for comp in obj.getComponents() if comp.meta_type == meta_type] + components = [ + comp + for comp in obj.getComponents() + if comp.meta_type == meta_type + ] else: - components = list(getObjectsFromCatalog(obj.componentSearch, query, log)) + components = list( + getObjectsFromCatalog(obj.componentSearch, query, log) + ) graphDefault = None graphDict = {} - # find the graph for each component and a default graph for components without one + # Find the graph for each component and a default graph for + # components without one. 
for comp in components: - for graph, ctx in comp.getGraphObjects(): + for graph, _ in comp.getGraphObjects(): if graph.id == graphId: if not graphDefault: graphDefault = graph @@ -1045,15 +1367,25 @@ def getComponentGraphs(self, uid, meta_type, graphId, allOnSame=False): return [] if allOnSame: - return [MultiContextMetricServiceGraphDefinition(graphDefault, components)] + return [ + MultiContextMetricServiceGraphDefinition( + graphDefault, components, graphsOnSame + ) + ] graphs = [] for comp in components: graph = graphDict.get(comp.id) if graph: - info = getMultiAdapter((graph, comp), IMetricServiceGraphDefinition) + info = getMultiAdapter( + (graph, comp), IMetricServiceGraphDefinition + ) graphs.append(info) - return graphs + + return { + "data": graphs[limit["start"] : limit["end"]], + "data_length": len(graphs), + } def getDevTypes(self, uid): """ @@ -1067,12 +1399,14 @@ def getDevTypes(self, uid): for org in organizers: org_name = org.getOrganizerName() org_id = org.getPrimaryId() - if not hasattr(aq_base(org), 'devtypes') or not org.devtypes: - devtypes.append({ - 'value': org_id, - 'description': org_name, - 'protocol': "", - }) + if not hasattr(aq_base(org), "devtypes") or not org.devtypes: + devtypes.append( + { + "value": org_id, + "description": org_name, + "protocol": "", + } + ) continue for t in org.devtypes: try: @@ -1088,33 +1422,45 @@ def getDevTypes(self, uid): # can allow the zenpack to be backwards compatible # ZEN-19596: Add support for Cluster and any sub-class for # Windows and Cluster - ms_dev_classes = ('/Server/Microsoft/{}'.format(cls) - for cls in ('Windows', 'Cluster')) - matched_org_to_dev_cls = any(org_name.startswith(cls) - for cls in ms_dev_classes) - if matched_org_to_dev_cls and ptcl == 'WMI': + ms_dev_classes = ( + "/Server/Microsoft/{}".format(cls) + for cls in ("Windows", "Cluster") + ) + matched_org_to_dev_cls = any( + org_name.startswith(cls) for cls in ms_dev_classes + ) + if matched_org_to_dev_cls and ptcl == "WMI": ptcl = "WinRM" - devtypes.append({ - 'value': org_id, - 'description': desc, - 'protocol': ptcl, - }) - return sorted(devtypes, key=lambda x: x.get('description')) + devtypes.append( + { + "value": org_id, + "description": desc, + "protocol": ptcl, + } + ) + return sorted(devtypes, key=lambda x: x.get("description")) def getDeviceClasses(self, allClasses=True): """ Get a list of device classes. - If not allClasses, get only device classes which should use the standard - device creation job. + + If not allClasses, get only device classes which should use the + standard device creation job. 
""" devices = self._dmd.Devices deviceClasses = [] user = getSecurityManager().getUser() + def getOrganizerNames(org, user, deviceClasses): - if user.has_permission(ZEN_VIEW, org) and allClasses or org.getZ('zUsesStandardDeviceCreationJob', True): + if ( + user.has_permission(ZEN_VIEW, org) + and allClasses + or org.getZ("zUsesStandardDeviceCreationJob", True) + ): deviceClasses.append(org.getOrganizerName()) for suborg in org.children(checkPerm=False): getOrganizerNames(suborg, user, deviceClasses) + getOrganizerNames(devices, user, deviceClasses) deviceClasses.sort(key=lambda x: x.lower()) return deviceClasses @@ -1128,7 +1474,7 @@ def getAllCredentialsProps(self): props[prop] = prop for org in self.context.dmd.Devices.getSubOrganizers(): for prop in org.zCredentialsZProperties: - props[prop] = (prop, org.exportZProperty(prop)['type']) + props[prop] = (prop, org.exportZProperty(prop)["type"]) return props.values() def maskPropertyPassword(self, inst, propname): diff --git a/Products/Zuul/facades/metricfacade.py b/Products/Zuul/facades/metricfacade.py index d6ab0d77ad..25e3de759c 100644 --- a/Products/Zuul/facades/metricfacade.py +++ b/Products/Zuul/facades/metricfacade.py @@ -634,7 +634,6 @@ def _buildWildCardMetrics( format=format, tags={"contextUUID": ["*"]}, rate=isRate, - name=device.getResourceKey() + metricName, ) return metric diff --git a/Products/Zuul/facades/networkfacade.py b/Products/Zuul/facades/networkfacade.py index 8194d96f7a..990af99c13 100644 --- a/Products/Zuul/facades/networkfacade.py +++ b/Products/Zuul/facades/networkfacade.py @@ -20,7 +20,7 @@ from Products.Zuul.facades import TreeFacade from Products.Zuul.interfaces import IInfo, ITreeFacade, INetworkFacade from Products.Zuul.decorators import info -from Products.Zuul.utils import unbrain +from Products.Zuul.utils import try_unbrain from Products.Zuul.tree import SearchResults from zenoss.protocols.protobufs.zep_pb2 import ( SEVERITY_CRITICAL, SEVERITY_ERROR, SEVERITY_WARNING, @@ -122,7 +122,6 @@ def _assignSnmpStatuses(self, infos, snmpStatuses): def getIpAddresses(self, limit=0, start=0, sort='ipAddressAsInt', dir='DESC', params=None, uid=None, criteria=()): - infos = [] cat = IModelCatalogTool(self._getObject(uid)) reverse = dir=='DESC' @@ -130,10 +129,13 @@ def getIpAddresses(self, limit=0, start=0, sort='ipAddressAsInt', dir='DESC', start=start, limit=limit, orderby=sort, reverse=reverse) - for brain in brains: - infos.append(IInfo(unbrain(brain))) + infos = [ + IInfo(obj) + for obj in (try_unbrain(brain) for brain in brains) + if obj is not None + ] - devuuids = set(info.device.uuid for info in infos if info.device) + devuuids = {info.device.uuid for info in infos if info.device} # get ping severities zep = getFacade('zep') @@ -214,11 +216,12 @@ def removeIpAddresses(self, uids): errorCount = 0 for uid in uids: ip = self._getObject(uid) - # there is an interface do not delete it - if ip.interface(): + # there is an interface or manageDevice do not delete it + if ip.interface() or ip.manageDevice(): errorCount += 1 continue # remove it from the relationship + ip._pre_remove() parent = aq_parent(ip) parent._delObject(ip.id) removeCount += 1 diff --git a/Products/Zuul/facades/reportfacade.py b/Products/Zuul/facades/reportfacade.py index f7e84fda92..9ccb7ce274 100644 --- a/Products/Zuul/facades/reportfacade.py +++ b/Products/Zuul/facades/reportfacade.py @@ -16,7 +16,7 @@ from Products.Zuul.facades import TreeFacade from Products.Zuul.interfaces import ITreeFacade, IReportFacade, IMetricServiceGraphDefinition 
from Products.Zuul.routers.report import reportTypes, essentialReportOrganizers -from Products.Zuul.infos.metricserver import MultiContextMetricServiceGraphDefinition +from Products.Zuul.infos.metricserver import MultiGraphReportGraphDefinition _createMethods = [ 'manage_addDeviceReport', @@ -94,6 +94,6 @@ def getMultiGraphReportDefs(self, uid, graphGroup=None): info = getMultiAdapter((graphDef['graphDef'], graphDef['context'], graphDef['collection']), IMetricServiceGraphDefinition) else: # specialized adapter for combined graph groups - info = MultiContextMetricServiceGraphDefinition(graphDef['graphDef'], graphDef['context']) + info = MultiGraphReportGraphDefinition(graphDef['graphDef'], graphDef['context'], graphDef['collection']) graphs.append(info) return graphs diff --git a/Products/Zuul/infos/metricserver.py b/Products/Zuul/infos/metricserver.py index 060974c4c5..e14008c887 100644 --- a/Products/Zuul/infos/metricserver.py +++ b/Products/Zuul/infos/metricserver.py @@ -32,10 +32,11 @@ class MetricServiceGraph(HasUuidInfoMixin): - def __init__(self, graph, context): + def __init__(self, graph, context, graphsOnSame=None): self._object = graph self._context = context self._showContextTitle = False + self._graphsOnSame = graphsOnSame class MetricServiceGraphDefinition(MetricServiceGraph): @@ -371,7 +372,7 @@ def tags(self): class MultiContextMetricServiceGraphDefinition(MetricServiceGraphDefinition): """ - This is a specialized adapter for multi graph reports where we have metrics for multiple + This is a specialized adapter where we have metrics for multiple contexts on a single adapter. """ implements(templateInterfaces.IMetricServiceGraphDefinition) @@ -401,6 +402,8 @@ def _getGraphPoints(self, klass): self._updateRPNForMultiContext(infos, knownDatapointNames) + if self._graphsOnSame: + return infos[:self._graphsOnSame] return infos def _updateRPNForMultiContext(self, infos, knownDatapointNames): @@ -411,6 +414,32 @@ def _updateRPNForMultiContext(self, infos, knownDatapointNames): info.setMultiContextRPN(newRPN) +class MultiGraphReportGraphDefinition(MultiContextMetricServiceGraphDefinition): + """ + This is a specialized adapter for multi graph reports where we have metrics for multiple + contexts on a single adapter. 
+ """ + implements(templateInterfaces.IMetricServiceGraphDefinition) + + def __init__(self, graph, context, collection=None): + super(MultiGraphReportGraphDefinition, self).__init__(graph, context) + self._collection = collection + + @property + def contextTitle(self): + """For multi graph reports we need group name in title.""" + obj = self._object + if hasattr(obj, "getGraphGroups"): + groupName = next( + (group.titleOrId() for group in obj.getGraphGroups() + if obj.id == group.graphDefId and self._collection.id == group.collectionId), + obj.titleOrId() + ) + else: + groupName = obj.titleOrId() + return groupName + + class OSProcessMetricServiceGraphDefinition(MetricServiceGraphDefinition): adapts(GraphDefinition, OSProcess) implements(templateInterfaces.IMetricServiceGraphDefinition) diff --git a/Products/Zuul/interfaces/actions.py b/Products/Zuul/interfaces/actions.py index 851cf1e1c7..31efc4ef43 100644 --- a/Products/Zuul/interfaces/actions.py +++ b/Products/Zuul/interfaces/actions.py @@ -7,7 +7,7 @@ # ############################################################################## - +from Products.ZenUtils.snmp import authentication_protocols, privacy_protocols from Products.Zuul.interfaces import IInfo from Products.Zuul.form import schema from Products.Zuul.utils import ZuulMessageFactory as _t @@ -198,15 +198,17 @@ class ISNMPv3ActionContentInfo(ISnmpTrapActionContentInfo): contextName = schema.TextLine(title=_t(u'Context Name')) authProto = schema.Choice(title=_t(u"Authentication Protocol"), - vocabulary=SimpleVocabulary.fromValues(['None', 'MD5', 'SHA']), - default = _t(u'None') + vocabulary=SimpleVocabulary.fromValues( + ('None',) + authentication_protocols + ), + default=_t(u'None'), ) securityName = schema.TextLine(title=_t(u'Security Name')) securityPassphrase = schema.Password(title=_t(u'Security Passphrase')) privProto = schema.Choice(title=_t(u"Privacy Protocol"), - vocabulary=SimpleVocabulary.fromValues(['None', 'DES', 'AES']), - default = _t(u'None') + vocabulary=SimpleVocabulary.fromValues(('None',) + privacy_protocols), + default=_t(u'None'), ) privacyPassphrase = schema.Password(title=_t(u'Privacy Passphrase')) diff --git a/Products/Zuul/routers/device.py b/Products/Zuul/routers/device.py index 4e74ad88b4..90429b98d9 100644 --- a/Products/Zuul/routers/device.py +++ b/Products/Zuul/routers/device.py @@ -1651,13 +1651,17 @@ def addDevice(self, deviceName, deviceClass, title=None, audit('UI.Device.Add', deviceUid, data_=auditData) return DirectResponse.succeed(new_jobs=Zuul.marshal(jobrecords, keys=('uuid', 'description'))) - @require('Manage Device') + def remodel_device_permissions(self, deviceUid, collectPlugins='', background=True): + ctx = self.context if deviceUid is None else self._getFacade()._getObject(deviceUid) + return Zuul.checkPermission(ZEN_MANAGE_DEVICE, ctx) + + @require(remodel_device_permissions) def remodel(self, deviceUid, collectPlugins='', background=True): """ Submit a job to have a device remodeled. @type deviceUid: string - @param deviceUid: Device uid to have local template + @param deviceUid: Device uid to remodel @type collectPlugins: string @param collectPlugins: (optional) Modeler plugins to use. 
Takes a regular expression (default: '') @@ -1994,14 +1998,18 @@ def getGraphDefintionsForComponents(self, uid): data = facade.getGraphDefinitionsForComponent(uid) return DirectResponse.succeed(data=Zuul.marshal(data)) - def getComponentGraphs(self, uid, meta_type, graphId, allOnSame=False): + def getComponentGraphs(self, uid, meta_type, graphId, limit, graphsOnSame, allOnSame=False): """ Returns the graph denoted by graphId for every component in device (uid) with the meta_type meta_type """ + data_length = 0 facade = self._getFacade() - data = facade.getComponentGraphs(uid, meta_type, graphId, allOnSame=allOnSame) - return DirectResponse.succeed(data=Zuul.marshal(data)) + data = facade.getComponentGraphs(uid, meta_type, graphId, limit, graphsOnSame, allOnSame=allOnSame) + if type(data) is dict: + data_length = data['data_length'] + data = data['data'] + return DirectResponse.succeed(data=Zuul.marshal(data), data_length=data_length) def getDevTypes(self, uid, filter=None): """ diff --git a/Products/Zuul/routers/zep.py b/Products/Zuul/routers/zep.py index da64549f16..865f8e2bab 100644 --- a/Products/Zuul/routers/zep.py +++ b/Products/Zuul/routers/zep.py @@ -1,6 +1,6 @@ ############################################################################## # -# Copyright (C) Zenoss, Inc. 2009, all rights reserved. +# Copyright (C) Zenoss, Inc. 2009, 2023 all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. @@ -18,7 +18,7 @@ import re import time from dateutil.parser import parse as parse_to_dt -from json import loads +from json import loads, dumps from lxml.html.clean import clean_html from zenoss.protocols.exceptions import NoConsumersException, PublishException from zenoss.protocols.protobufs.zep_pb2 import STATUS_NEW, STATUS_ACKNOWLEDGED @@ -31,7 +31,11 @@ from Products.ZenUtils.guid.interfaces import IGlobalIdentifier, IGUIDManager from Products.ZenEvents.EventClass import EventClass from Products.ZenMessaging.audit import audit -from Products.ZenModel.ZenossSecurity import ZEN_MANAGE_EVENTS +from Products.ZenModel.ZenossSecurity import ( + ZEN_MANAGE_EVENTS, + ZEN_MANAGER_ROLE, + MANAGER_ROLE +) from Products.ZenUtils.deprecated import deprecated from Products.Zuul.utils import resolve_context from Products.Zuul.utils import ZuulMessageFactory as _t @@ -43,6 +47,22 @@ READ_WRITE_ROLES = ['ZenManager', 'Manager', 'ZenOperator'] +ZEN_MANAGER_EDIT_PERM = ( + 'event_age_disable_severity', + 'event_age_interval_minutes', + 'event_archive_interval_minutes', + 'event_age_severity_inclusive', + 'default_syslog_priority', + 'default_trap_filtering_definition', + 'syslog_parsers', + 'syslog_summary_to_message', + 'default_syslog_message_filtering_rules', + 'default_availability_days', + 'event_time_purge_interval_days', + 'enable_event_flapping_detection', + 'flapping_event_class', +) + log = logging.getLogger('zen.%s' % __name__) class _FilterParser(object): @@ -619,8 +639,7 @@ def _hasPermissionsForAllEvents(self, permission, evids): log.debug(e) return False - def manage_events(self, evids=None, excludeIds=None, params=None, - uid=None, asof=None, limit=None, timeout=None): + def manage_events(self, evids=None, excludeIds=None, params=None, uid=None, asof=None, limit=None, timeout=None): user = self.context.dmd.ZenUsers.getUserSettings() if Zuul.checkPermission(ZEN_MANAGE_EVENTS, self.context): return True @@ -631,8 +650,7 @@ def manage_events(self, evids=None, excludeIds=None, params=None, 
if uid is not None: organizer = self.context.dmd.Devices.getOrganizer(uid) else: - return self._hasPermissionsForAllEvents(ZEN_MANAGE_EVENTS, - evids) + return self._hasPermissionsForAllEvents(ZEN_MANAGE_EVENTS, evids) except (AttributeError, KeyError): return False @@ -649,7 +667,7 @@ def manage_events(self, evids=None, excludeIds=None, params=None, ) return organizer.getBreadCrumbUrlPath() in manage_events_for - + def can_add_events(self, summary, device, component, severity, evclasskey, evclass=None, monitor=None, **kwargs): ctx = self.context.dmd.Devices.findDevice(device.strip()) @@ -1017,6 +1035,28 @@ def configSchema(self): 'allowNegative': False, 'value': self.context.dmd.ZenEventManager.defaultPriority },{ + 'id': 'default_trap_filtering_definition', + 'name': _t('SNMP Trap Filtering Rules'), + 'xtype': 'textarea', + 'allowNegative': False, + 'value': self.context.dmd.ZenEventManager.trapFilters + },{ + 'id': 'syslog_parsers', + 'name': _t('Syslog Parsers'), + 'xtype': 'textarea', + 'value': dumps(self.context.dmd.ZenEventManager.syslogParsers, indent=2) + },{ + 'id': 'syslog_summary_to_message', + 'name': _t('Mirror Syslog Event\'s Summary value to Message field'), + 'xtype': 'checkbox', + 'value': self.context.dmd.ZenEventManager.syslogSummaryToMessage + },{ + 'id': 'default_syslog_message_filtering_rules', + 'name': _t('Syslog Message Filtering Rules'), + 'xtype': 'textarea', + 'allowNegative': False, + 'value': dumps(self.context.dmd.ZenEventManager.syslogMsgEvtFieldFilterRules, indent=2) + },{ 'id': 'default_availability_days', 'name': _t('Default Availability Report (days)'), 'xtype': 'numberfield', @@ -1065,6 +1105,17 @@ def configSchema(self): }] return configSchema + def iseditable(self, field): + currentUser = self.context.dmd.ZenUsers.getUser() + if currentUser: + if currentUser.has_role(MANAGER_ROLE): + return True + + if currentUser.has_role(ZEN_MANAGER_ROLE) and field in ZEN_MANAGER_EDIT_PERM: + return True + + return False + def _mergeSchemaAndZepConfig(self, data, configSchema): """ Copy the values and defaults from ZEP to our schema @@ -1082,7 +1133,8 @@ def getConfig(self): # constructed to include default values and be keyed by the protobuf # property name. 
data = self.zep.getConfig() - config = self._mergeSchemaAndZepConfig(data, self.configSchema) + schema = self._mergeSchemaAndZepConfig(data, self.configSchema) + config = [setting for setting in schema if self.iseditable(setting['id'])] return DirectResponse.succeed(data=config) @require('Manage DMD') @@ -1101,11 +1153,28 @@ def setConfigValues(self, values): if defaultSyslogPriority is not None: self.context.dmd.ZenEventManager.defaultPriority = int(defaultSyslogPriority) + trapFilters = values.pop('default_trap_filtering_definition', None) + if trapFilters is not None: + self.context.dmd.ZenEventManager.trapFilters = trapFilters + + syslogParsers = values.pop('syslog_parsers', None) + if syslogParsers is not None: + self.context.dmd.ZenEventManager.syslogParsers = loads(syslogParsers) + + syslogSummaryToMessage = values.pop('syslog_summary_to_message', None) + if syslogSummaryToMessage is not None: + self.context.dmd.ZenEventManager.syslogSummaryToMessage = syslogSummaryToMessage + syslogMsgEvtFieldFilterRules = values.pop('default_syslog_message_filtering_rules', None) + if syslogMsgEvtFieldFilterRules is not None: + self.context.dmd.ZenEventManager.syslogMsgEvtFieldFilterRules = loads(syslogMsgEvtFieldFilterRules) + defaultAvailabilityDays = values.pop('default_availability_days', None) if defaultAvailabilityDays is not None: self.context.dmd.ZenEventManager.defaultAvailabilityDays = int(defaultAvailabilityDays) - self.zep.setConfigValues(values) + # filter by role whether user can update settings. + eventConfig = {key: value for (key, value) in values.items() if self.iseditable(key)} + self.zep.setConfigValues(eventConfig) return DirectResponse.succeed() def column_config(self, uid=None, archive=False): diff --git a/Products/Zuul/utils.py b/Products/Zuul/utils.py index d13c41a046..afb505db56 100644 --- a/Products/Zuul/utils.py +++ b/Products/Zuul/utils.py @@ -168,6 +168,15 @@ def unbrain(item): return item +def try_unbrain(item, default=None): + try: + return unbrain(item) + except KeyError: + if log.getEffectiveLevel() == logging.DEBUG: + log.warning("catalog object not found in ZODB uid=%s", item.uid) + return default + + class BrainWhilePossible(object): def __init__(self, ob): self._ob = ob @@ -485,7 +494,7 @@ def create_redis_client(): return client def _connected_to_redis(self): - """ Ensures we have a connection to redis """ + """Ensures we have a connection to redis""" if self._redis_client is None: now = time.time() if ( diff --git a/SCHEMA_VERSION b/SCHEMA_VERSION index ca77111dd4..e595a2db8c 100644 --- a/SCHEMA_VERSION +++ b/SCHEMA_VERSION @@ -1 +1 @@ -200.6.0 +200.7.0 diff --git a/VERSION b/VERSION index f0e13c5090..e029aa99b7 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -6.7.0 +6.8.0 diff --git a/bin/dumpstats b/bin/dumpstats new file mode 100755 index 0000000000..2a9cf4a58f --- /dev/null +++ b/bin/dumpstats @@ -0,0 +1,18 @@ +#!/bin/sh +# Retrieve statistics from zenhubworker + +set -e + +FILE=/opt/zenoss/etc/global.conf + +getprop() { + grep "${1}" $FILE | cut -d' ' -f2 +} + +_PORT=$(getprop 'localport') +PORT=${_PORT:-14682} +URL=http://localhost:$PORT/stats +curl -fs $URL +if [ $? 
-eq 0 ]; then + echo +fi diff --git a/bin/healthchecks/MetricShipper/store_answering b/bin/healthchecks/MetricShipper/store_answering index 904f4e6257..38f6f8f878 100755 --- a/bin/healthchecks/MetricShipper/store_answering +++ b/bin/healthchecks/MetricShipper/store_answering @@ -13,7 +13,7 @@ is_ready() { status_url=$1 - timeout 3 curl -A 'Metric_Shipper Store_answering Healthcheck' -w %{http_code} -s -XHEAD ${status_url} + timeout 3 curl -A 'Metric_Shipper Store_answering Healthcheck' -o /dev/null -w %{http_code} -s --head ${status_url} } http_code=$(is_ready http://localhost:8080/ping/status/metrics) diff --git a/bin/healthchecks/metrics_answering b/bin/healthchecks/metrics_answering index 20c3fb8057..b70c27f555 100755 --- a/bin/healthchecks/metrics_answering +++ b/bin/healthchecks/metrics_answering @@ -13,7 +13,7 @@ is_ready() { status_url=$1 - timeout 3 curl -A 'Metrics_answering Healthcheck' -w %{http_code} -s -XHEAD ${status_url} + timeout 3 curl -A 'Metrics_answering Healthcheck' -o /dev/null -w %{http_code} -s --head ${status_url} } http_code=$(is_ready http://localhost:8080/ping/status/metrics) diff --git a/bin/healthchecks/query_answering b/bin/healthchecks/query_answering index 4d8f9e36fa..9ebc341ad8 100755 --- a/bin/healthchecks/query_answering +++ b/bin/healthchecks/query_answering @@ -13,7 +13,7 @@ is_ready() { status_url=$1 - timeout 3 curl -A 'Query_answering is_ready' -w %{http_code} -s -XHEAD ${status_url} + timeout 3 curl -A 'Query_answering is_ready' -o /dev/null -w %{http_code} -s --head ${status_url} } http_code=$(is_ready http://localhost:8080/ping/status/performance) diff --git a/bin/healthchecks/zenhub_answering b/bin/healthchecks/zenhub_answering index 8cfddc679e..d1459682e7 100755 --- a/bin/healthchecks/zenhub_answering +++ b/bin/healthchecks/zenhub_answering @@ -8,9 +8,16 @@ # ############################################################################## -if [ -e /opt/zenoss/var/zenhub_connected ] -then - exit 0 -else - exit 1 -fi +set -e + +FILE=/opt/zenoss/etc/global.conf + +getprop() { + grep "${1}" $FILE | cut -d' ' -f2 +} + +_PORT=$(getprop 'localport') +PORT=${_PORT:-14682} +URL=http://localhost:$PORT/zenhub + +test "$(curl -sq $URL)" = "connected" diff --git a/bin/metrics/zenossStatsView.py b/bin/metrics/zenossStatsView.py index 526dfa43e0..a459a33302 100755 --- a/bin/metrics/zenossStatsView.py +++ b/bin/metrics/zenossStatsView.py @@ -1,7 +1,7 @@ #!/usr/bin/env python ############################################################################## # -# Copyright (C) Zenoss, Inc. 2017, all rights reserved. +# Copyright (C) Zenoss, Inc. 2017-2024, all rights reserved. # # This content is made available according to terms specified in # License.zenoss under the directory where your Zenoss product is installed. @@ -34,6 +34,8 @@ class ZProxyMetricGatherer(MetricGatherer): ZPROXY_CONF_DIR = '/opt/zenoss/zproxy/conf/' # Note that changing the follwing ID format will break this script. INSTANCE_ID_FORMAT = '{}_{}' + + last_changed = {} def __init__(self, interval=30): super(ZProxyMetricGatherer, self).__init__() @@ -84,21 +86,28 @@ def get_zopes(self, first_time=False): self.zopes = {} # Check mtime of /opt/zenoss/zproxy/conf/zope-upstreams.conf - # if it's newer than, say, now - self.interval, reread it. + # if it's newer than it was modified last time, reread it. 
zope_upstream_file = self.ZPROXY_CONF_DIR + 'zope-upstreams.conf' zenapi_upstream_file = self.ZPROXY_CONF_DIR + 'apizopes-upstreams.conf' zenreports_upstream_file = self.ZPROXY_CONF_DIR + 'zopereports-upstreams.conf' zauth_upstream_file = self.ZPROXY_CONF_DIR + 'zauth-upstreams.conf' + + def read_upstream_file(upstream_file): + with open(upstream_file, 'r') as inf: + zopes = inf.readlines() + zopes = [line.rstrip('\n;') for line in zopes] + zopes = [line.split(' ')[-1] for line in zopes] + return zopes def check_upstream_util(upstream_file): upstream_modified = os.path.getmtime(upstream_file) - now = time.time() zopes = [] - if first_time or upstream_modified > (now - self.interval): - with open(upstream_file, 'r') as inf: - zopes = inf.readlines() - zopes = [line.rstrip('\n;') for line in zopes] - zopes = [line.split(' ')[-1] for line in zopes] + if first_time: + zopes = read_upstream_file(upstream_file) + self.last_changed[upstream_file] = upstream_modified + elif upstream_modified > self.last_changed.get('upstream_file'): + zopes = read_upstream_file(upstream_file) + self.last_changed[upstream_file] = upstream_modified return zopes def check_upstream(svcName, upstream_file): @@ -126,7 +135,7 @@ def get_metrics(self): for id_, zope in zopes.iteritems(): try: s = requests.Session() - # ZEN-29775 add a tomeout to zope instance call + # ZEN-29775 add a timeout to zope instance call result = s.get(self.ZPROXY_STATS_URL % zope, timeout=10) if result.status_code == 200: data = result.json() diff --git a/bin/nmap b/bin/nmap new file mode 100755 index 0000000000..b8a59641de --- /dev/null +++ b/bin/nmap @@ -0,0 +1,22 @@ +#!/bin/bash + +NMAP=/usr/bin/nmap + +CLEANED=() + +while [[ $# -gt 0 ]]; do + case $1 in + --script|-sC) + echo argument $1 not allowed + exit 1 + ;; + *) + CLEANED+=("$1") + shift + ;; + esac +done + +set -- "${CLEANED[@]}" + +exec ${NMAP} $@ diff --git a/bin/runtests.py b/bin/runtests.py index 760744b60b..3ff2b21204 100755 --- a/bin/runtests.py +++ b/bin/runtests.py @@ -13,6 +13,8 @@ Run unit and Selenium (functional) tests for Zenoss """ +from __future__ import print_function + import glob import optparse import os @@ -30,7 +32,7 @@ scriptdir = os.path.realpath(os.path.dirname(sys.argv[0])) sys.path[:] = [p for p in sys.path if os.path.realpath(p) != scriptdir] -ZENHOME = os.environ['ZENHOME'] +ZENHOME = os.environ["ZENHOME"] ZENPACK_HOME = "/var/zenoss/ZenPacks" @@ -42,14 +44,16 @@ def zenpackdir(*args): return os.path.join(ZENPACK_HOME, *args) -PYTHON = zenhome('bin', 'python') -CONFIG = zenhome('etc', 'zope.conf') -SOFTWARE_HOME = zenhome('lib', 'python') -PRODUCTS = zenhome('Products') +PYTHON = zenhome("bin", "python") +CONFIG = zenhome("etc", "zope.conf") +SOFTWARE_HOME = zenhome("lib", "python") +PRODUCTS = zenhome("Products") # add SOFTWARE_HOME to sys.path, but only if Zope isn't available try: - import Zope2 + import imp + + _ = imp.find_module("Zope2") except ImportError: sys.path.insert(0, SOFTWARE_HOME) @@ -61,46 +65,52 @@ def runZopeTests(options): from zope.testrunner.options import setup def load_config_file(option, opt, config_file, *ignored): - print "Parsing %s" % config_file + print("Parsing %s" % config_file) import Zope2 + Zope2.configure(config_file) setup.add_option( - '--config-file', action='callback', type='string', - dest='config_file', callback=load_config_file + "--config-file", + action="callback", + type="string", + dest="config_file", + callback=load_config_file, ) - defaults = '--tests-pattern ^tests$ -v'.split() - defaults += 
['--config-file', CONFIG] - - if '-m' not in options: - defaults += ['-m', - '!^(' - 'ZConfig' - '|' - 'BTrees' - '|' - 'persistent' - '|' - 'ThreadedAsync' - '|' - 'transaction' - '|' - 'ZEO' - '|' - 'ZODB' - '|' - 'ZopeUndo' - '|' - 'zdaemon' - '|' - 'zope[.]testing' - '|' - 'zope[.]app' - ')[.]'] - - defaults += ['--path', SOFTWARE_HOME] - defaults += ['--package-path', PRODUCTS, 'Products'] + defaults = "--tests-pattern ^tests$ -v".split() + defaults += ["--config-file", CONFIG] + + if "-m" not in options: + defaults += [ + "-m", + "!^(" + "ZConfig" + "|" + "BTrees" + "|" + "persistent" + "|" + "ThreadedAsync" + "|" + "transaction" + "|" + "ZEO" + "|" + "ZODB" + "|" + "ZopeUndo" + "|" + "zdaemon" + "|" + "zope[.]testing" + "|" + "zope[.]app" + ")[.]", + ] + + defaults += ["--path", SOFTWARE_HOME] + defaults += ["--package-path", PRODUCTS, "Products"] sys.exit(testrunner.run(defaults, options)) @@ -116,10 +126,10 @@ def overrideCoreTests(results): """ commercial = False for result in results: - if 'Skin' in result: + if "Skin" in result: commercial = True if commercial: - results.remove(zenhome('Products', 'ZenUITests')) + results.remove(zenhome("Products", "ZenUITests")) return results @@ -137,14 +147,14 @@ def findSeleniumTests(packages=None, regex=None): if packages is None: packages = [] prods = findSeleniumTestableProducts( - packages, regex, testdir='tests/selenium' + packages, regex, testdir="tests/selenium" ) results = [] if not regex: - regex = 'testAll' - regex += '.py' + regex = "testAll" + regex += ".py" for prod in prods: - selpath = os.path.join(prod, 'tests', 'selenium', regex) + selpath = os.path.join(prod, "tests", "selenium", regex) if os.path.exists(selpath): results.append(selpath) return results @@ -164,19 +174,21 @@ def demangleEggName(eggdir, name): @rtype: string """ path = eggdir - components = name.split('.', 2) # ie a list with three items + components = name.split(".", 2) # ie a list with three items # Note, we discard the last item to satisfy findTestableProducts for component in components[0:1]: if os.path.isdir(os.path.join(path, component)): path = os.path.join(path, component) continue - newcomponent = component.split('-', 1)[0] + '*' + newcomponent = component.split("-", 1)[0] + "*" found = glob.glob(os.path.join(os.path.join(path, newcomponent))) if len(found) != 1: # Ouch! 
Something bad happened - print "Unable to find egg directory from %s and %s" % \ - (path, component) + print( + "Unable to find egg directory from %s and %s" + % (path, component) + ) return eggdir path = os.path.join(path, found[0]) return path @@ -194,13 +206,13 @@ def expandPackDir(fulldir): @rtype: string """ name = os.path.basename(fulldir) - if not name.endswith('.egg'): + if not name.endswith(".egg"): # Old-style ZenPack return fulldir return demangleEggName(fulldir, name) -_packname = re.compile('ZenPacks\.[^-/]+\.[^-/]+').search +_packname = re.compile(r"ZenPacks\.[^-/]+\.[^-/]+").search def zenPackName(s): @@ -216,24 +228,23 @@ def findZenPackNames(): def findZenPacksFromDirectory(directory): - dirs = [] + paths = [] try: for item in os.listdir(directory): fullpath = os.path.join(directory, item) if os.path.isdir(fullpath): - dir = expandPackDir(fullpath) - elif item.endswith('.egg-link'): - f = file(fullpath) - dir = expandPackDir(f.readline().strip()) - f.close() + path = expandPackDir(fullpath) + elif item.endswith(".egg-link"): + with open(fullpath) as f: + path = expandPackDir(f.readline().strip()) else: continue - if not dir.endswith('ZenPacks'): - dir = os.path.join(dir, 'ZenPacks') - dirs.append(dir) + if not path.endswith("ZenPacks"): + path = os.path.join(path, "ZenPacks") + paths.append(path) except OSError: pass - return dirs + return paths def findZenPackDirectories(): @@ -243,32 +254,36 @@ def findZenPackDirectories(): @return: list of ZenPack directories @rtype: list of strings """ - return findZenPacksFromDirectory(ZENPACK_HOME) + \ - findZenPacksFromDirectory(zenhome("ZenPacks")) + return findZenPacksFromDirectory(ZENPACK_HOME) + findZenPacksFromDirectory( + zenhome("ZenPacks") + ) def findZenossProducts(include_zenpacks): """ Get all Zenoss products + ZenPacks. """ - validProds = ['Products.'+x for x in ( - 'DataCollector', - 'Jobber', - 'ZenCallHome', - 'ZenEvents', - 'ZenHub', - 'ZenModel', - 'ZenModel.migrate', - 'ZenRRD', - 'ZenRelations', - 'ZenReports', - 'ZenStatus', - 'ZenUtils', - 'ZenWidgets', - 'Zuul', - 'ZenCollector', - 'ZenMessaging', - )] + validProds = [ + "Products." 
+ name + for name in ( + "DataCollector", + "Jobber", + "ZenCallHome", + "ZenEvents", + "ZenHub", + "ZenModel", + "ZenModel.migrate", + "ZenRRD", + "ZenRelations", + "ZenReports", + "ZenStatus", + "ZenUtils", + "ZenWidgets", + "Zuul", + "ZenCollector", + "ZenMessaging", + ) + ] if include_zenpacks: zenpacks = findZenPackNames() else: @@ -277,14 +292,14 @@ def findZenossProducts(include_zenpacks): def isValidPackage(package, validProducts): - package_seq = package.split('.') + package_seq = package.split(".") return any( - product == '.'.join(package_seq[:len(product.split('.'))]) + product == ".".join(package_seq[: len(product.split("."))]) for product in validProducts ) -def findSeleniumTestableProducts(packages=None, regex=None, testdir='tests'): +def findSeleniumTestableProducts(packages=None, regex=None, testdir="tests"): """ Get the list of Zope Products with tests @@ -300,53 +315,55 @@ def findSeleniumTestableProducts(packages=None, regex=None, testdir='tests'): if packages is None: packages = [] results = [] - for target in findZenPackDirectories() + [zenhome('Products')]: - for root, dirs, files in os.walk(target): - + for target in findZenPackDirectories() + [zenhome("Products")]: + for root, dirs, _ in os.walk(target): # don't look past a lib directory that is under the ZenPacks # directory rootParts = root.split(os.path.sep) - if 'ZenPacks' in rootParts and \ - 'lib' in rootParts[rootParts.index('ZenPacks'):]: + if ( + "ZenPacks" in rootParts + and "lib" in rootParts[rootParts.index("ZenPacks") :] + ): continue for dir in dirs: if ( packages # ZenPacks have a problem unless you do this - and target.split('/')[-1] not in packages + and target.split("/")[-1] not in packages and dir not in packages ): continue # Sigh. We need to make sure no-one ends a ZenPack # with 'Products' - if target.endswith('Products'): + if target.endswith("Products"): if ( - not (dir.startswith('Zen') or dir == 'DataCollector') - or dir in 'ZenTestRunner' + not (dir.startswith("Zen") or dir == "DataCollector") + or dir in "ZenTestRunner" ): continue newdir = os.path.join(root, dir) if testdir in os.listdir(newdir): - init_file = os.path.join(newdir, testdir, '__init__.py') + init_file = os.path.join(newdir, testdir, "__init__.py") if not os.path.exists(init_file): - print ( + print( "Warning: missing the %s file -- skipping %s" ) % (init_file, target) elif regex: - f = os.path.join(newdir, testdir, regex + '.py') + f = os.path.join(newdir, testdir, regex + ".py") if os.path.exists(f): results.append(newdir) else: results.append(newdir) results = overrideCoreTests(results) if not results: - print "No %s directories found for %s" % (testdir, packages) + print("No %s directories found for %s" % (testdir, packages)) return results def runSeleniumTests( - packages=None, regex=None, zenoss_server=None, selenium_server=None): + packages=None, regex=None, zenoss_server=None, selenium_server=None +): """ Run any Selenium tests that match the regular expression. 
@@ -363,17 +380,22 @@ def runSeleniumTests( packages = [] tests = findSeleniumTests(packages, regex) for testscript in tests: - command = ['python', testscript] + command = ["python", testscript] for arg in (zenoss_server, selenium_server): if arg is not None: command.append(arg) - rc = call(command) + rc = call(command) # noqa: S603 exitcodes.append(rc) def runUnitTests( - packages=None, modules=None, names=None, coverage="", count=0, - include_zenpacks=True): + packages=None, + modules=None, + names=None, + coverage="", + count=0, + include_zenpacks=True, +): """ Run unit tests for any packages that match the regular expression. @@ -398,45 +420,45 @@ def runUnitTests( if not isValidPackage(pkg, valid_packages): packages.remove(pkg) invalid_packages.append(pkg) - print "="*30 - print - print "Packages to be tested:" + print("=" * 30) + print() + print("Packages to be tested:") for p in packages: - print "\t" + p - print + print("\t" + p) + print() if invalid_packages: - print "Invalid packages:" + print("Invalid packages:") for p in invalid_packages: - print '\t' + p - print "="*30 + print("\t" + p) + print("=" * 30) - cmdline_args = ['--config-file', CONFIG] + cmdline_args = ["--config-file", CONFIG] # Add ZenPack homes to package directories for d in findZenPackDirectories(): - path = d.rsplit('/', 1)[0] + path = d.rsplit("/", 1)[0] name = zenPackName(d) if name in packages or name in modules: - cmdline_args.extend(['--test-path', path]) - packdir = os.path.join(path, *name.split('.')) - libdir = os.path.join(packdir, 'lib') + cmdline_args.extend(["--test-path", path]) + cmdline_args.extend(["--package-path", path, name]) + packdir = os.path.join(path, *name.split(".")) + libdir = os.path.join(packdir, "lib") if os.path.exists(libdir): - cmdline_args.extend(['--ignore_dir', 'lib']) + cmdline_args.extend(["--ignore_dir", "lib"]) - cmdline_args.extend(chain.from_iterable(['-s', p] for p in packages)) - cmdline_args.extend(chain.from_iterable(['-m', m] for m in modules)) - cmdline_args.extend(chain.from_iterable(['-t', t] for t in names)) + cmdline_args.extend(chain.from_iterable(["-s", p] for p in packages)) + cmdline_args.extend(chain.from_iterable(["-m", m] for m in modules)) + cmdline_args.extend(chain.from_iterable(["-t", t] for t in names)) if count: - cmdline_args.append('-'+('v'*count)) + cmdline_args.append("-" + ("v" * count)) if coverage: - cmdline_args.extend(['--coverage', coverage]) + cmdline_args.extend(["--coverage", coverage]) if packages or modules or names: sys.argv[:] = sys.argv[:1] runZopeTests(cmdline_args) -usage = \ - """%prog [options] [package1 [package2]] +usage = """%prog [options] [package1 [package2]] Run Zenoss tests against specified packages. 
@@ -458,56 +480,61 @@ def runUnitTests( def main(): - parser = optparse.OptionParser(prog='runtests.py', usage=usage) + parser = optparse.OptionParser(prog="runtests.py", usage=usage) parser.add_option( - '-t', '--type', - type="choice", choices=("unit", "selenium", "all"), default='unit', - help='The type of tests to run (default: %default)' + "-t", + "--type", + type="choice", + choices=("unit", "selenium", "all"), + default="unit", + help="The type of tests to run (default: %default)", ) parser.add_option( - '-c', '--coverage', - help='Directory to store coverage stats' + "-c", "--coverage", help="Directory to store coverage stats" ) parser.add_option( - '-v', dest="count", action="count", - help="Verbosity of test output" + "-v", dest="count", action="count", help="Verbosity of test output" ) parser.add_option( - '-m', '--module', action="append", - help='The name of a test module.' + "-m", "--module", action="append", help="The name of a test module." ) parser.add_option( - '-n', '--name', action="append", - help='The name of an individual test' + "-n", "--name", action="append", help="The name of an individual test" ) parser.add_option( - '--selenium-server', - help='The server hosting the Selenium jar' + "--selenium-server", help="The server hosting the Selenium jar" ) parser.add_option( - '--zenoss-server', - help='The Zenoss server against which Selenium should test' + "--zenoss-server", + help="The Zenoss server against which Selenium should test", ) parser.add_option( - '-Z', '--no-zenpacks', - dest='no_zenpacks', action="store_true", default=False, - help='Only run core tests, even if ZenPacks are installed' + "-Z", + "--no-zenpacks", + dest="no_zenpacks", + action="store_true", + default=False, + help="Only run core tests, even if ZenPacks are installed", ) (options, args) = parser.parse_args() - if options.type in ('unit', 'all'): + if options.type in ("unit", "all"): runUnitTests( - args, options.module, options.name, options.coverage, - options.count, not options.no_zenpacks + args, + options.module, + options.name, + options.coverage, + options.count, + not options.no_zenpacks, ) - if options.type in ('selenium', 'all'): + if options.type in ("selenium", "all"): runSeleniumTests( args, options.name, options.zenoss_server, options.selenium_server ) -if __name__ == '__main__': +if __name__ == "__main__": start = time.time() main() if exitcodes: diff --git a/bin/upgrade_reindex.sh b/bin/upgrade_reindex.sh deleted file mode 100755 index 3bfe4c0846..0000000000 --- a/bin/upgrade_reindex.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -# -# Only force a reindex if we're upgrading from < 6.x. The upgrade script provides the top level service name as an argument. -# - -# Get the major version number that we're upgrading from. -MAJOR=$(cat /var/zenoss/upgrade_from_version.txt | cut -d"." -f1) -TOPSERVICE=$@ - -# Resmgr/Core upgraded to Solr in version 6.0.0. UCSPM will probably upgrade to Solr in 3.x -if [[ "${TOPSERVICE}" == "ucspm" ]]; then - SOLRVERSION=3 -else - SOLRVERSION=6 -fi - -# Only force a catalog reindex if the current major version of RM is less than 6. -if [[ $MAJOR -lt $SOLRVERSION ]]; then - /opt/zenoss/bin/zencatalog run --createcatalog --forceindex -else - echo "Upgrading from ${TOPSERVICE} major version ${MAJOR}. Skipping catalog reindex." 
-fi diff --git a/bin/zenglobalconf b/bin/zenglobalconf index 19e597310b..83b8718983 100755 --- a/bin/zenglobalconf +++ b/bin/zenglobalconf @@ -143,8 +143,9 @@ def main(): print "--sync-zope-conf only valid with global.conf" sys.exit(1) # load zcml for the product - import Products.ZenossStartup + from Products.ZenUtils.zenpackload import load_zenpacks from Products.Five import zcml + load_zenpacks() zcml.load_site() # look up the utility from zope.component import getUtility diff --git a/bin/zenjobs b/bin/zenjobs deleted file mode 100755 index 08e3ee49be..0000000000 --- a/bin/zenjobs +++ /dev/null @@ -1,43 +0,0 @@ -#!/opt/zenoss/bin/python2.7 -############################################################################## -# -# Copyright (C) Zenoss, Inc. 2009-2019 all rights reserved. -# -# This content is made available according to terms specified in -# License.zenoss under the directory where your Zenoss product is installed. -# -############################################################################## - -from __future__ import absolute_import - -import re -import sys - -from celery.__main__ import main - - -def _initialize_zenoss_env(): - from Zope2.App import zcml - import Products.Jobber - import Products.ZenWidgets - from OFS.Application import import_products - from Products.ZenUtils.Utils import load_config, load_config_override - from Products.ZenUtils.zenpackload import load_zenpacks - - import_products() - load_zenpacks() - zcml.load_site() - load_config("signals.zcml", Products.Jobber) - load_config_override('scriptmessaging.zcml', Products.ZenWidgets) - - -_initialize_zenoss_env() - -sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - -# All calls to celery need to the application so include it here. -sys.argv[1:] = [ - "-A", "Products.Jobber.zenjobs", -] + sys.argv[1:] - -sys.exit(main()) diff --git a/bin/zenrun.d/zenpack-manager.sh b/bin/zenrun.d/zenpack-manager.sh index 6ecbcc4d46..b0340bf9f9 100644 --- a/bin/zenrun.d/zenpack-manager.sh +++ b/bin/zenrun.d/zenpack-manager.sh @@ -85,9 +85,6 @@ link() { fi rsync -a "$zenpackPath"/ "$TARGET"/ - # Get var path (/var/zenoss) to strip off of TARGET - VARPATH=$(unset TERM; echo "from Products.ZenUtils.Utils import varPath;print varPath()" | zendmd --script /dev/stdin) - sudo /opt/zenoss/bin/var_chown "${TARGET#$VARPATH}" shift zenpack --link --install "$TARGET" "$@" diff --git a/bin/zensyslog b/bin/zensyslog index dcd1922ec5..6fcfcc6d61 100755 --- a/bin/zensyslog +++ b/bin/zensyslog @@ -12,7 +12,7 @@ . $ZENHOME/bin/zenfunctions PRGHOME=$ZENHOME/Products/ZenEvents -PRGNAME=zensyslog.py +PRGNAME=zensyslog CFGFILE=$CFGDIR/zensyslog.conf generic "$@" diff --git a/bin/zentrap b/bin/zentrap index 4076c10c8b..8331855e5e 100755 --- a/bin/zentrap +++ b/bin/zentrap @@ -8,11 +8,10 @@ # ############################################################################## - . 
$ZENHOME/bin/zenfunctions PRGHOME=$ZENHOME/Products/ZenEvents -PRGNAME=zentrap.py +PRGNAME=zentrap CFGFILE=$CFGDIR/zentrap.conf generic "$@" diff --git a/etc/sudoers.d/zenoss_dmidecode b/etc/sudoers.d/zenoss_dmidecode deleted file mode 100644 index b8152fc353..0000000000 --- a/etc/sudoers.d/zenoss_dmidecode +++ /dev/null @@ -1,4 +0,0 @@ -# Allows Zenoss to examine hardware information -%zenoss ALL=(ALL) NOPASSWD: /usr/sbin/dmidecode -Defaults:zenoss !requiretty - diff --git a/etc/sudoers.d/zenoss_nmap b/etc/sudoers.d/zenoss_nmap index 4543296c66..0fa92555b0 100644 --- a/etc/sudoers.d/zenoss_nmap +++ b/etc/sudoers.d/zenoss_nmap @@ -1,4 +1,3 @@ -# Allows Zenoss to use nmap for pinging -%zenoss ALL=(ALL) NOPASSWD: /usr/bin/nmap +# Allow privileged execution of nmap wrapper script +%zenoss ALL = NOPASSWD: /opt/zenoss/bin/nmap Defaults:zenoss !requiretty - diff --git a/etc/sudoers.d/zenoss_ping b/etc/sudoers.d/zenoss_ping index ccb1eb9ac5..b5ccf5b671 100644 --- a/etc/sudoers.d/zenoss_ping +++ b/etc/sudoers.d/zenoss_ping @@ -1,5 +1,3 @@ -# Allows Zenoss to use ping for pinging -%zenoss ALL=(ALL) NOPASSWD: /usr/bin/ping -%zenoss ALL=(ALL) NOPASSWD: /usr/bin/ping6 +# Allow privileged execution of ping and ping6 +%zenoss ALL = NOPASSWD: /usr/bin/ping,/usr/bin/ping6 Defaults:zenoss !requiretty - diff --git a/etc/sudoers.d/zenoss_var_chown b/etc/sudoers.d/zenoss_var_chown deleted file mode 100644 index 486cb0e4e8..0000000000 --- a/etc/sudoers.d/zenoss_var_chown +++ /dev/null @@ -1,4 +0,0 @@ -# Allows Zenoss to run a chown command in its var directory -%zenoss ALL=(ALL) NOPASSWD: /opt/zenoss/bin/var_chown -Defaults:zenoss !requiretty - diff --git a/install-zenoss.mk.in b/install-zenoss.mk.in deleted file mode 100644 index 21ca9184eb..0000000000 --- a/install-zenoss.mk.in +++ /dev/null @@ -1,29 +0,0 @@ -TARGET = /mnt -ZENHOME = /opt/zenoss - -SITE_PACKAGES = lib/python2.7/site-packages -PTH_FILE = $(SITE_PACKAGES)/Zenoss-nspkg.pth -EGG_LINK = $(SITE_PACKAGES)/Zenoss.egg-link - -.PHONY: install configure-user - -install: $(TARGET)/$(PTH_FILE) $(TARGET)/$(EGG_LINK) $(TARGET)/Zenoss.egg-info - -configure-user: - groupmod -g %GID% zenoss - usermod -u %UID% zenoss - chown -R zenoss:zenoss $(ZENHOME) - -$(TARGET)/Zenoss.egg-info: $(ZENHOME)/$(PTH_FILE) - -$(ZENHOME)/$(PTH_FILE): | configure-user - su - zenoss -c "cd $(TARGET); python setup.py develop" - -$(TARGET)/$(SITE_PACKAGES): - su - zenoss -c "mkdir -p $@" - -$(TARGET)/$(PTH_FILE): $(ZENHOME)/$(PTH_FILE) | $(TARGET)/$(SITE_PACKAGES) - su - zenoss -c "cp $< $@" - -$(TARGET)/$(EGG_LINK): | $(TARGET)/$(SITE_PACKAGES) - su - zenoss -c "printf \"/opt/zenoss\n.\n\" > $@" diff --git a/javascript.mk b/javascript.mk index 41c884c42a..15e833025f 100644 --- a/javascript.mk +++ b/javascript.mk @@ -47,10 +47,10 @@ JSBUILD_COMMAND = java -jar $(JSBUILDER) -p $(JSB_FILE) -d $(JS_BASEDIR) -v JSB_SOURCES = $(shell python2 -c "import json, sys, os.path; d=sys.stdin.read(); p=json.loads(d)['pkgs'][0]['fileIncludes']; print ' '.join(os.path.join('$(JS_BASEDIR)', e['path'], e['text']) for e in p)" < $(JSB_FILE)) JSB_TARGETS = $(JS_OUTPUT_DIR)/zenoss-compiled.js $(JS_OUTPUT_DIR)/zenoss-compiled-debug.js -.PHONY: clean-javascript build-javascript - +.PHONY: build-javascript build-javascript: $(JSB_TARGETS) +.PHONY: clean-javascript clean-javascript: @-rm -vrf $(JS_OUTPUT_DIR) diff --git a/jenkins_build.sh b/jenkins_build.sh index 2edce67824..1400318340 100755 --- a/jenkins_build.sh +++ b/jenkins_build.sh @@ -49,11 +49,8 @@ 
REPO_PATH=${ZENDEV_ROOT}/src/github.com/zenoss/${REPO_NAME} cleanup() { RC="$?" - if [[ $RC == 0 ]]; then - zendev drop ${ZENDEV_ENV} - docker image rm zendev/devimg:${ZENDEV_ENV} zendev/product-base:${ZENDEV_ENV} - docker image rm zendev/mariadb:${ZENDEV_ENV} zendev/mariadb-base:${ZENDEV_ENV} - fi + zendev drop ${ZENDEV_ENV} + docker image rm -f zendev/devimg:${ZENDEV_ENV} zendev/product-base:${ZENDEV_ENV} zendev/mariadb:${ZENDEV_ENV} zendev/mariadb-base:${ZENDEV_ENV} } trap cleanup INT TERM EXIT @@ -132,4 +129,13 @@ if [ "$1" != "--no-tests" ]; then fi echo Building the artifacts... -cdz ${REPO_NAME};make clean build +docker run --rm \ + -v ${HOME}/.m2:/home/zenoss/.m2 \ + -v ${ZENDEV_ROOT}/zenhome:/opt/zenoss \ + -v ${ZENDEV_ROOT}/src/github.com/zenoss:/mnt/src \ + -w /mnt/src/zenoss-prodbin \ + --env BRANCH=${BRANCH} \ + --env ZENHOME=/opt/zenoss \ + --env SRCROOT=/mnt/src \ + zendev/devimg:${ZENDEV_ENV} \ + make clean build diff --git a/makefile b/makefile index 1b3a4174ab..340bd70e5f 100644 --- a/makefile +++ b/makefile @@ -3,23 +3,11 @@ BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD) ARTIFACT_TAG ?= $(shell echo $(BRANCH) | sed 's/\//-/g') ARTIFACT = prodbin-$(VERSION)-$(ARTIFACT_TAG).tar.gz -IMAGE = zenoss/zenoss-centos-base:1.4.0.devtools - -USER_ID := $(shell id -u) -GROUP_ID := $(shell id -g) - -DOCKER = $(shell which docker 2>/dev/null) -ifneq ($(DOCKER),) -_common_cmd = $(DOCKER) run --rm -v $(PWD):/mnt -w /mnt -DOCKER_USER = $(_common_cmd) --user $(USER_ID):$(GROUP_ID) $(IMAGE) -DOCKER_ROOT = $(_common_cmd) $(IMAGE) -endif +IMAGE = zenoss/zenpackbuild:ubuntu2204-5 ZENHOME = $(shell echo $$ZENHOME) -.PHONY: default test clean build javascript build-javascript - -default: $(ARTIFACT) +.DEFAULT_GOAL := $(ARTIFACT) include javascript.mk include migration.mk @@ -27,31 +15,23 @@ include migration.mk EXCLUSIONS = *.pyc $(MIGRATE_VERSION).in Products/ZenModel/migrate/tests Products/ZenUITests ARCHIVE_EXCLUSIONS = $(foreach item,$(EXCLUSIONS),--exclude=$(item)) -ARCHIVE_INCLUSIONS = Products bin lib etc share Zenoss.egg-info +ARCHIVE_INCLUSIONS = Products bin etc share VERSION setup.py +.PHONY: build build: $(ARTIFACT) # equivalent to python setup.py develop +.PHONY: install install: setup.py $(JSB_TARGETS) $(MIGRATE_VERSION) ifeq ($(ZENHOME),/opt/zenoss) - @python setup.py develop + @pip install --prefix /opt/zenoss -e . else @echo "Please execute this target in a devshell container (where ZENHOME=/opt/zenoss)." 
endif +.PHONY: clean clean: clean-javascript clean-migration - rm -f $(ARTIFACT) install-zenoss.mk - rm -rf Zenoss.egg-info lib + rm -f $(ARTIFACT) -$(ARTIFACT): $(JSB_TARGETS) $(MIGRATE_VERSION) Zenoss.egg-info +$(ARTIFACT): $(JSB_TARGETS) $(MIGRATE_VERSION) VERSION setup.py tar cvfz $@ $(ARCHIVE_EXCLUSIONS) $(ARCHIVE_INCLUSIONS) - -Zenoss.egg-info: install-zenoss.mk setup.py -ifneq ($(DOCKER),) - $(DOCKER_ROOT) make -f install-zenoss.mk install -else - $(error The $@ target requires Docker) -endif - -install-zenoss.mk: install-zenoss.mk.in - sed -e "s/%GID%/$(GROUP_ID)/" -e "s/%UID%/$(USER_ID)/" $< > $@ diff --git a/migration.mk b/migration.mk index c1f5c4d2e6..e3a1c25f21 100644 --- a/migration.mk +++ b/migration.mk @@ -11,16 +11,17 @@ SCHEMA_MAJOR = $(call pick_version_part,1,$(SCHEMA_VERSION)) SCHEMA_MINOR = $(call pick_version_part,2,$(SCHEMA_VERSION)) SCHEMA_REVISION = $(call pick_version_part,3,$(SCHEMA_VERSION)) -.PHONY: clean-migration generate-zversion generate-zmigrateversion - +.PHONY: clean-migration clean-migration: rm -f $(MIGRATE_VERSION) # Exists for backward compatibility +.PHONY: generate-zversion generate-zversion: generate-zmigrateversion # See the topic "Managing Migrate.Version" in Products/ZenModel/migrate/README.md # for more information about setting the SCHEMA_* values. +.PHONY: generate-zmigrateversion generate-zmigrateversion: $(MIGRATE_VERSION) $(MIGRATE_VERSION): $(MIGRATE_VERSION).in SCHEMA_VERSION @@ -33,6 +34,7 @@ $(MIGRATE_VERSION): $(MIGRATE_VERSION).in SCHEMA_VERSION # The target replace-zmigrationversion should be used just prior to release to lock # down the schema versions for a particular release +.PHONY: replace-zmigrateversion replace-zmigrateversion: @echo Replacing SCHEMA_MAJOR with $(SCHEMA_MAJOR) @echo Replacing SCHEMA_MINOR with $(SCHEMA_MINOR) @@ -52,6 +54,7 @@ SCHEMA_FOUND = $(shell grep Migrate.Version Products/ZenModel/migrate/*.py | gr # The target verify-explicit-zmigrateversion should be invoked as a first step in all release # builds to verify that all of the SCHEMA_* variables were replaced with an actual numeric value. 
+.PHONY: verify-explicit-zmigrateversion verify-explicit-zmigrateversion: ifeq ($(SCHEMA_FOUND),) @echo "Good - no SCHEMA_* variables found: $(SCHEMA_FOUND)" diff --git a/setup.py b/setup.py index 5b3f274e00..46d194b4ea 100644 --- a/setup.py +++ b/setup.py @@ -1,22 +1,24 @@ +from __future__ import print_function + from os import path # , walk from distutils.command.build import build from setuptools import setup, find_packages from setuptools.command.develop import develop from setuptools.command.install import install -from setuptools.command.sdist import sdist _here = path.abspath(path.dirname(__file__)) with open(path.join(_here, "VERSION"), "r") as _f: - _version = ''.join(_f.readlines()).strip() + _version = "".join(_f.readlines()).strip() class ZenInstallCommand(install): """Used to disable installs.""" def run(self): - print "Installation disabled" + print("Installation disabled") import sys + sys.exit(1) @@ -24,8 +26,9 @@ class ZenBuildCommand(build): """Used to disable builds.""" def run(self): - print "Build disabled" + print("Build disabled") import sys + sys.exit(1) @@ -41,13 +44,6 @@ class ZenDevelopCommand(develop): ) -def applySchemaVersion(*args, **kw): - print("Applied: %s %s" % (args, kw)) - - -sdist.sub_commands.append(("apply_schema_version", applySchemaVersion)) - - setup( name="Zenoss", version=_version, @@ -76,8 +72,12 @@ def applySchemaVersion(*args, **kw): "install": ZenInstallCommand, }, entry_points={ + "console_scripts": [ + "configcache=Products.ZenCollector.configcache.__main__:main", + "zenjobs=Products.Jobber.bin:main", + ], "celery.commands": [ - "monitor=Products.Jobber.monitor:ZenJobsMonitor", + "monitor=Products.Jobber.monitor:MonitorCommand", ], }, )
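
Note on the bin/zenjobs removal above: the checked-in wrapper script is superseded by the new console_scripts entry point declared in setup.py ("zenjobs=Products.Jobber.bin:main"), so the executable is generated by pip/setuptools at install time instead of being shipped in bin/. Products.Jobber.bin itself is not part of this diff; the sketch below is illustrative only and assumes that module's main() keeps the behavior of the deleted script, i.e. bootstrap the Zenoss/Zope environment and then hand off to celery with the Jobber application.

    # Hypothetical sketch of a console-script main() mirroring the deleted bin/zenjobs
    from __future__ import absolute_import

    import sys


    def main():
        # Bootstrap the Zenoss environment exactly as the old wrapper script did.
        from Zope2.App import zcml
        import Products.Jobber
        import Products.ZenWidgets
        from OFS.Application import import_products
        from Products.ZenUtils.Utils import load_config, load_config_override
        from Products.ZenUtils.zenpackload import load_zenpacks

        import_products()
        load_zenpacks()
        zcml.load_site()
        load_config("signals.zcml", Products.Jobber)
        load_config_override("scriptmessaging.zcml", Products.ZenWidgets)

        # Every celery invocation needs the Jobber application, so prepend it
        # to the arguments before delegating to celery's own entry point.
        sys.argv[1:] = ["-A", "Products.Jobber.zenjobs"] + sys.argv[1:]

        from celery.__main__ import main as celery_main
        return celery_main()

With the makefile's new editable install (pip install --prefix /opt/zenoss -e .), the generated wrapper should land under /opt/zenoss/bin, so an invocation such as "zenjobs worker" is expected to behave like the old "bin/zenjobs worker"; the same mechanism produces the new "configcache" command from the other entry point added in setup.py.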