From 7947e611cbfab475c16554a5cea64a5c5625d3bd Mon Sep 17 00:00:00 2001
From: Daniel Glauche
Date: Fri, 30 Jul 2021 02:31:18 +0200
Subject: [PATCH] Initial commit

---
 README.md | 7 +
 bin/notableeditor.py | 75 +
 bin/splunklib/__init__.py | 20 +
 .../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 387 bytes
 .../__pycache__/binding.cpython-37.pyc | Bin 0 -> 50522 bytes
 .../__pycache__/client.cpython-37.pyc | Bin 0 -> 132015 bytes
 bin/splunklib/__pycache__/data.cpython-37.pyc | Bin 0 -> 7289 bytes
 bin/splunklib/__pycache__/six.cpython-37.pyc | Bin 0 -> 26865 bytes
 bin/splunklib/binding.py | 1415 +++++++
 bin/splunklib/client.py | 3737 +++++++++++++++++
 bin/splunklib/data.py | 266 ++
 bin/splunklib/modularinput/__init__.py | 12 +
 bin/splunklib/modularinput/argument.py | 103 +
 bin/splunklib/modularinput/event.py | 114 +
 bin/splunklib/modularinput/event_writer.py | 87 +
 .../modularinput/input_definition.py | 60 +
 bin/splunklib/modularinput/scheme.py | 85 +
 bin/splunklib/modularinput/script.py | 177 +
 bin/splunklib/modularinput/utils.py | 74 +
 .../modularinput/validation_definition.py | 86 +
 bin/splunklib/ordereddict.py | 128 +
 bin/splunklib/results.py | 295 ++
 bin/splunklib/searchcommands/__init__.py | 155 +
 .../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 5733 bytes
 .../__pycache__/decorators.cpython-37.pyc | Bin 0 -> 15483 bytes
 .../__pycache__/environment.cpython-37.pyc | Bin 0 -> 3356 bytes
 .../eventing_command.cpython-37.pyc | Bin 0 -> 5026 bytes
 .../external_search_command.cpython-37.pyc | Bin 0 -> 6971 bytes
 .../generating_command.cpython-37.pyc | Bin 0 -> 16950 bytes
 .../__pycache__/internals.cpython-37.pyc | Bin 0 -> 23804 bytes
 .../reporting_command.cpython-37.pyc | Bin 0 -> 8352 bytes
 .../__pycache__/search_command.cpython-37.pyc | Bin 0 -> 33402 bytes
 .../streaming_command.cpython-37.pyc | Bin 0 -> 6182 bytes
 .../__pycache__/validators.cpython-37.pyc | Bin 0 -> 12814 bytes
 bin/splunklib/searchcommands/decorators.py | 450 ++
 bin/splunklib/searchcommands/environment.py | 123 +
 .../searchcommands/eventing_command.py | 149 +
 .../searchcommands/external_search_command.py | 228 +
 .../searchcommands/generating_command.py | 350 ++
 bin/splunklib/searchcommands/internals.py | 844 ++++
 .../searchcommands/reporting_command.py | 281 ++
 .../searchcommands/search_command.py | 1127 +++++
 .../searchcommands/streaming_command.py | 195 +
 bin/splunklib/searchcommands/validators.py | 394 ++
 bin/splunklib/six.py | 980 +++++
 default/app.conf | 12 +
 default/commands.conf | 7 +
 default/searchbnf.conf | 9 +
 local/app.conf | 4 +
 metadata/default.meta | 3 +
 metadata/local.meta | 13 +
 51 files changed, 12065 insertions(+)
 create mode 100755 README.md
 create mode 100755 bin/notableeditor.py
 create mode 100644 bin/splunklib/__init__.py
 create mode 100755 bin/splunklib/__pycache__/__init__.cpython-37.pyc
 create mode 100755 bin/splunklib/__pycache__/binding.cpython-37.pyc
 create mode 100755 bin/splunklib/__pycache__/client.cpython-37.pyc
 create mode 100755 bin/splunklib/__pycache__/data.cpython-37.pyc
 create mode 100755 bin/splunklib/__pycache__/six.cpython-37.pyc
 create mode 100644 bin/splunklib/binding.py
 create mode 100644 bin/splunklib/client.py
 create mode 100644 bin/splunklib/data.py
 create mode 100644 bin/splunklib/modularinput/__init__.py
 create mode 100644 bin/splunklib/modularinput/argument.py
 create mode 100644 bin/splunklib/modularinput/event.py
 create mode 100644 bin/splunklib/modularinput/event_writer.py
 create mode 100644 bin/splunklib/modularinput/input_definition.py
 create mode 100644 bin/splunklib/modularinput/scheme.py
 create mode 100644 bin/splunklib/modularinput/script.py
 create mode 100644 bin/splunklib/modularinput/utils.py
 create mode 100644 bin/splunklib/modularinput/validation_definition.py
 create mode 100644 bin/splunklib/ordereddict.py
 create mode 100644 bin/splunklib/results.py
 create mode 100644 bin/splunklib/searchcommands/__init__.py
 create mode 100755 bin/splunklib/searchcommands/__pycache__/__init__.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/decorators.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/environment.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/eventing_command.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/external_search_command.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/generating_command.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/internals.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/reporting_command.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/search_command.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/streaming_command.cpython-37.pyc
 create mode 100755 bin/splunklib/searchcommands/__pycache__/validators.cpython-37.pyc
 create mode 100644 bin/splunklib/searchcommands/decorators.py
 create mode 100644 bin/splunklib/searchcommands/environment.py
 create mode 100644 bin/splunklib/searchcommands/eventing_command.py
 create mode 100644 bin/splunklib/searchcommands/external_search_command.py
 create mode 100644 bin/splunklib/searchcommands/generating_command.py
 create mode 100644 bin/splunklib/searchcommands/internals.py
 create mode 100644 bin/splunklib/searchcommands/reporting_command.py
 create mode 100644 bin/splunklib/searchcommands/search_command.py
 create mode 100644 bin/splunklib/searchcommands/streaming_command.py
 create mode 100644 bin/splunklib/searchcommands/validators.py
 create mode 100644 bin/splunklib/six.py
 create mode 100755 default/app.conf
 create mode 100755 default/commands.conf
 create mode 100755 default/searchbnf.conf
 create mode 100755 local/app.conf
 create mode 100755 metadata/default.meta
 create mode 100755 metadata/local.meta

diff --git a/README.md b/README.md
new file mode 100755
index 0000000..06440a8
--- /dev/null
+++ b/README.md
@@ -0,0 +1,7 @@
+# TA-notableeditor
+This app provides a custom search command to mass edit notable events
+
+## Example
+```
+`notables` | head 10 | editnotables status="closed"
+```
\ No newline at end of file
diff --git a/bin/notableeditor.py b/bin/notableeditor.py
new file mode 100755
index 0000000..aa0a19b
--- /dev/null
+++ b/bin/notableeditor.py
@@ -0,0 +1,75 @@
+from splunklib.searchcommands import dispatch, ReportingCommand, Configuration, Option
+import splunklib.client as client
+import sys
+import json
+
+STATUS_MAP = {
+    'new': 1,
+    'in progress': 2,
+    'pending': 3,
+    'resolved': 4,
+    'closed': 5
+}
+
+VALID_URGENCIES = [
+    'critical', 'high',
+    'medium', 'low',
+    'informational'
+]
+
+@Configuration(requires_preop=True)
+class EditNotablesCommand(ReportingCommand):
+    comment = Option(
+        doc='The comment to set',
+        require=False)
+
+    status = Option(
+        doc='The status to set',
+        require=False)
+
+    urgency = Option(
+        doc='The urgency to set',
+        require=False)
+
+    newOwner = Option(
+        doc='The new owner of the notables',
+        require=False)
+
+    @Configuration()
+    def map(self, records):
+        return records
+
+    def reduce(self, records):
+        args = {}
+        if self.comment:
+            args['comment'] = self.comment
+
+        if self.status and self.status.lower() in STATUS_MAP.keys():
+            args['status'] = STATUS_MAP[self.status.lower()]
+
+        if self.urgency:
+            if self.urgency.lower() not in VALID_URGENCIES:
+                yield {'result': f"The urgency value provided is not valid. Valid ones are: {VALID_URGENCIES}" }
+                return
+            args['urgency'] = self.urgency
+
+        if self.newOwner:
+            args['newOwner'] = self.newOwner
+
+        if not args:
+            yield {'result': 'Please provide at least one of the options comment, status, urgency, newOwner' }
+            return
+
+        event_ids = []
+        for record in records:
+            event_ids.append(record['event_id'])
+
+        args['ruleUIDs'] = event_ids
+        req = client.Endpoint(
+            client.connect(token=self._metadata.searchinfo.session_key),
+            '/services/notable_update'
+        ).post(body=args)
+
+        yield json.loads(req['body'].readall().decode())
+
+dispatch(EditNotablesCommand, sys.argv, sys.stdin, sys.stdout, __name__)
\ No newline at end of file
diff --git a/bin/splunklib/__init__.py b/bin/splunklib/__init__.py
new file mode 100644
index 0000000..525dc8e
--- /dev/null
+++ b/bin/splunklib/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2011-2015 Splunk, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"): you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Python library for Splunk."""
+
+from __future__ import absolute_import
+from splunklib.six.moves import map
+__version_info__ = (1, 6, 16)
+__version__ = ".".join(map(str, __version_info__))
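For reference, the `reduce` step above collapses every piped-in result's `event_id` into a single `ruleUIDs` list and posts it, together with the requested field changes, to the Enterprise Security `/services/notable_update` REST endpoint. Below is a minimal sketch of the same call made directly through the bundled Splunk SDK; the connection details and the example rule UID are placeholders, not values taken from this patch:

```python
import json

import splunklib.client as client

# Placeholder connection details -- point these at a real ES search head.
service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# Same payload shape that EditNotablesCommand.reduce() builds:
# ruleUIDs carries the notable event_id values, the rest are the requested edits.
payload = {
    'ruleUIDs': ['00000000-0000-0000-0000-000000000000@@notable@@deadbeef'],  # placeholder event_id
    'status': 5,  # numeric ES status; 5 is 'closed' in STATUS_MAP
    'comment': 'Bulk closed via script',
}

# Post the update and print the endpoint's JSON response.
response = client.Endpoint(service, '/services/notable_update').post(body=payload)
print(json.loads(response['body'].read().decode()))
```

Exercising the endpoint this way makes it easy to confirm the payload shape and the account's permissions before running `editnotables` against live notables.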
[GIT binary patch payloads omitted: bin/splunklib/__pycache__/__init__.cpython-37.pyc, binding.cpython-37.pyc, client.cpython-37.pyc, and data.cpython-37.pyc]
z8TH~FT)d!&9xxc!1x^s?dd4K`&-i{AV2WY}Xm~6ER9B(9ATX0uK|^l_H^9qiC*0~M z(LxZ@{o%GI2@nHhv^t#&y)3(+Z}P%gD4T2H9#1tJd1-sFGulC~3D7t@3NfO%j{&|P zQ~H9GV+0wHh<|(?073VUL?~ntn51G`gCviE<(Lgo2-#q8Sqc`bpr0UBpt}R#k$WBG z!a{(YijphhoxT?mNuq?$q`Zp3u^^6?Nf-%YB)c17x*kCXp-`0em1eVxT#|nlS~C7* zk=X6S`0LVuC1*e4SeX-2OV~(A5hw5CEi~WJ2J{PXxC-$CfFW-=PTBrPfOp2|S?{7U zwQZ4efG1~P)vzSR861>Q-AP4BPdhOR3_q_fku}-r%PW5&+%%$l=d_A8+ienyb{h!` zrT&z;jJE?Wn{qNu7{7_GbV9kV^r~FXKN+%NvhKU>q-hA))1lh@^C-5bl8DFX#6Sm(S<1@Yo;?c7X7Ez)P z{u?%p>S%|KoV@PZcuzm->ER>E-67n?p-WF5YU^uYcgT+MNN@&{)tKb}0gx}AJb67K z_KA}zEY!&oC3J;>s9+RtnVj=9Z21Fhk>n65wNp@1Eq;>-QQpA2&+ur`fTQ)zz3CCR zbuczLn?g|vf0^n}F~F$?;*`1EIKT*1Ae>HX_pEOt`OIxf<90sIodd*%oOFDsqr<{J z@Tg{_u5Di~Y}A!V9l1a@L`uhr$bWER0Z6DUd4+{osabf!5Ehlu5Hr0z^f^kZ$dBoq z!!gmYvb~2G$ibRNH7BHmmod}H;pHDw9Sbp^uo1caF|P`wjwhk=6qZAP-4#hey?mJ( zvLJhND;b+Q7$9wx7lT(-qb5m}N-edNBe&bAA4GkfYm&Lq8B7hu1DPwbsk+GEqH-MP z@<-T#jEJR@BwgO2=Fn}aK@a|=Wo@d`+$+tUqxbf&*#8KRdL0dgP;~@G9wPc0bY_<4 z4$@5unCb|f>S*gn^hl$<(x{g(5>!tC4YhV#|M>vpdb_argk6MH@Q9(Esb z8s{-$1Vl9hl-H=}NWN)Q?$a)YH;&459p2bMnT^6N{jFq=szDsCt4;Qfi;N5ie=A|C z(LKdfuv8%*A8BcYN-0{ykurYq|2f$Po9Hfk11f3a(ujeQjCtz>{+7{1;21^C>$>=T z2V-5#s!sl_p;L0)IlCZ936y+}8oB^^iU3XV9}RA_=?_7Y?5>=p=6PzUu*ZM?kes@n zpy_F9UZUnKHDpP29e^q#oCb0-%9)$44Umd4SF~0=HlGtYD*Yubu2%>-MLCGWX*{Ie j@*Id2{X>XZiNOGbz+0lE(Lsrz$dN=+H${^K578nanxrJEMSC=uYJdX_W`@-b z5*P!rB}f+I_@EtIvK`w29p|vuaS|ua;Y~K#IEiy@&UKRQ+?!K5yhm;&S?~Y9>h75V zKq-><`*!B*>Q_~-u2-*Ky?RyE`}_KGG5l_OIbOc{`B?0a=?Q-a0qn)&KDH_rQ!zWH z;>xPUr!3ow6He3;c0%4sJ1Os!osxIjPRl!EXH?S8s+8U1q$hGJzA>iKla}2Z`7-e3 zjZdYYw-BG5Oxb--UVq5xAB)?ooB_C3IfHOlJN@<=XRW;s`0MQr^{1VU2E(~}%p#7H znYaefYG)Jh`UUUW#p_hh<+%O@XF%l^;z;$-;h5@G`R8IP?_9r-unV*O&J9jsA+`{Y z#~zQ>uS>?98z*y@;yN9V@gJsqB393xrmv6rJXuivYSnWw`zAG@uEP6fqn8eyy_w>9#vV>N@Lzuwx?v#&-)8u3RJW+j;=5ICLEMnKReW3F zyKNx>tZkBJyV@qfFLikUIJ*V=LA6J{N$r);f+{G`8s+Z8f4@M(YB+!nr~?6XcUK7qrH+TxK`HOB zwCf>t7`|K3rY#E=-bd7%<^8bQFI;qVGKD&QK^;S#Sk5hKL_H!H$JJZZ2?;r=PN_!) zdQ3eosZLMEkm@hgV@UOc#MtUdbw;4Ks<)|tpKsCHxEU{Q!JFD84Vk_e1Mr#+8sCRzISCRG=@Z|EPXUpdVNNN&Un^Vtl_{ zL@!;desa>fl++({is;qD<1Mz_{Jv z-0s}6z%+}_w#A*!?dr>nT_3v)`E@CIZt?NhleZ(c?Ik>@%TKR|46t_z?k@E+#KOq? 
zS@ji+ysyZ}y9*rrbL!`%O+)GzBIp+*=$9hszeLb4N6^nl(62<$uSU?XMbKXL#h{E| zkD%X(p#K^{UnQh|Lw!|7$ZtmBzZF5h9YOysg8T^jov6g$jiBF)px=+6KQPevM*HrW zP0rn*_8Ot~U2SS#N}uZ&dfx5a1$vU+cY&thfQ7jFL-j|O(IcSde^-ADYW}fM^Bz$1 zPt>0ZPYkI)i=aP8{lF*V3Hx4@c5hVLJzb^ENon^w_XMTggVO#&N_$!TWl-8*slP^P ze=VimhtmEAWqb|1HKhJW1pUtl`d=v5#QyIH`r8Qly9n>^Bj_I@=pQ5KpCag=Bj}X~ z`j-g$*9iKz2>L$}^z{(B6j!khINkwU9dM!pPIkbl4mjNbXFA|)2i(&E=fbw54!s?4 zz60*-fcrb(RUP>Z7%`+XtK>!0Xf(;jU-@;=Q5Ex3SB2b?6&U+PhJw-BFz$=&I8nN}YB)4+M33 zK`ykxg?L%=F1L#Ipw>k2u%c$Lo9GdsB#mH(R_9vBYF61@kaDhOIpy^gAG0)^XOx zxyd<-@PiAG`A%x_EzbVSar=Zo!+=f-bO6vPfer$CRG>qE9uw#=pvOVmUi~a`A_Whl z{L^?pgy#t0C-8nVo`(V3c(cS~fS*MAGk`~s!&?LR5x{Q?;NyTx^;5uj3!X676M(~9 zPXZ2eC0=K)k9OqxSVyjpGgq{mH9RftzR6(?p9o;q&<@}y0Y52l{!(6l4nEfRt*HA` zc)tx#32@mNwJW0cD_c2g(LwlAm*W#-_V|+uZ8@vrD6{IMCMN7jr)EzHZr!ds^@)bv zfZMd2Pu7vDY@efE25596`)qVFj&eT&sA6ll%68#8wpXWI7?ZGZ2|J6hCc>(C!um}B zHubC671VJY{z*JF)R}lwc+6OCldZrIGXVV{G$q!yqe z>CgC^9a=A2y<^9Vb?VA)UO z&30eFGxJ>BenR$V&P~oc06!<~mvvsW zmLvYtPGVvK&vRoIu%5^BF4X&ZguH7a<~(1Db;ZAc_=|{t0iiD-{)JMko+fNF=9}=m z8}+(~_c!Bt4`7CU4A1-VTtdos*A$P_PK#>#fG2laksJm!3`^ev#khc2zshi3LTA11sIhZMi#jAg%Qzl1gC zOAGOd+tfACO;_oD=Ot*Lw*@e8ofjXB9gkgFCA!gzPrk(Tiyv{eT}E%H#L3u}PsC%f z*4>YK)mqi7I&NXCp$iu@;QDx>(ojypJ6rY&db;iw8uh~QS??@fI|}8xD(rlj3-zJ6 zzp6ayHfqzJQ>soi8`>Lb-MSxxsXSFh3deJ_i!{4OgP^@l$)woO<9k}^JBxSjD&Fz3 zg`8eaFgr^p3DfbD6Xo;e>es*i^>j&oCaB`KmZjtdSbaH6du$E%zvH=FL6Q~S2hOw~ZLsj`P=o*Avy&$xn9tB#&=t24#ste+{BYEHdW+JlT; zRy`JP^_(sSp(Chkwp6N@r<_vB&y`A34K-aOoG+EmO_ysXM)!u*YFALxa-uVEcH@z( zvzGX4DFl4|9p;~jRisn|@y$ml!PIl{xmYQnN2{<;VVT^hOiwv=uk2OPXcjMe8QTG zZH%FZ;;nm*2tO4JO|})Z<4x;2S<^e~1fhlU7)UPxwX*A>0bX$^K}s5C1SJ#htm|h? zCN4XgG+kQf{t(~b9cRtSEHNt^*X!}F&=dYx_q}-BO>ma8ph|)jlPZN)q*0Zl<<{&0 z$F1ns>HPfw z=3aXc+G_A|<<(dYM+N)_~ti>~P3hk)uRWW_sFjq{E=CgXdg66VD>yNHjw1CEC=fU#gLT60iWFd`Cq%UnXmCbPOQ7oSheB`{`_ z_4FLZNhzrUj!F3`ce?3leG{^fmimd~Pwey)VCR6!rdJY;n(`BM=K`4QTH*>VgaV{b z6^~20#o`0;B&dwXYOU$0dYiL41uQvmm}!|42lGUHfnpXMV#)B)w;%##3P-o;F9lzX zD9ywHA9oiQa91>TNHIQA9J@1MWD9fy%@!=MG-v~+7KRQPHKAFIp8g?*WH0tb!-{B?8FZ0wCm2;ZIJFSx%2eHbL!Z^JdEL zl>uaUGa@v5gBkL1uY077 zH61lt&T?VZhMA@ShtvPKH4jl{T8OH}4q!&PC&n-^VroRQ8qyQdG!$lYlG$(?8V57I zHOC|qu@6}1k|)jhK!uG!_O0R_dMoqkX5m+IyK7a~t2CyXSm_}%=A4eTZtZS`s)Bcl zyK9X~x#sQx5T?PbbQQP{@}Wcq6A9@C^50G|$X!yR%*4#@Sr$j0AS3Q=qjA2>VC)j(s_YC*Zq$GMkqEtspG=aXj+pa+HUBr_w7)Mv;h|88d#r zjG295gGVaT4CKIf#CZyyrJM($_|S5pIF}MJxpxj?5#n+;#AT|UoJ-E9=2GC&RDFXH znnApcc*HU$6r*Xwzq?SR;or@K*eL1sp>%sjwlk2Vlo)mf(o}#fL)ck%mO+F#yVvQp z^C}Axt?}2CPF81*VRATL_Rbo`)6uP8&K2<6SC~RXp^2D+Q7fr(HUhXnO`n6;m32|o za|`bDsG+G_%t;QX&Q{%mXhBR=bEsnjy>R^C@tt?3R!YlRcn+177Itbf_t_x zT~mco2TQRj6kZ*zIg;CjDk>5##)@}Ix^k^naAqJ-s2`ulq6;cJ7Hwkz-cCu_CEF>Z zhDp}}IA$qO#ZWI{J!P22GAcvgWW8~r9!!prR9b~r2Vt^&S;{>W|KG#h4fjAxyLKfC zx4D|t;g9%Sz#PUS@?@qkXJN^^0lE))8daQ#tN8lZTw**vF-pcn-g+jpZ+BHz>8zm zvDK_PJWF9#rhWO@xLJV&o!+`Z#+Dg2SpH!Y)*D`dJR2?%(Ht^`pMBVwm34>TYX-Pk zgj|{MHWuM+V*WVW$?C@97FyaLZ{4smTXdv}no9adO+5k6Y|aB8&xK93(A4X?7Fe-G z@cZTXyyYe5EHAZS={}3)+8d&MIXPE_v*R&kWhDCeP`$bZBw#SL}?=R{6QA zTdliZxn6P1>Ou5mET9cZp~7}>gP99uHWP;`g5OKbfEm7w05QyfNEV5d2C4AJq=pG( z#o81*l1KC+uX6YaYfwOOZd(MsU><{xkJehPPl`;VfIcCXfr6)}oo$8cn3w~&%2y!^ zK>AXCS0>BjP9Tz*?s2D3cYx(lJVVBxZ2)c*(B}}jT00d8;igk5V0{YMEl}`Ag$4q6 zXK`0yi|aU)<-6{>b0~_ZVTWT!tR)4F~| zR%_#S5su6zgyXuG%OkUtG={I%0|B*08g<9aPKJc%rmNafLceyt1rR2hu^dI)#9n0j zYAwie$k2G`1JTIPB>H}3@gL%Hoz%BfgAk3K6N$c`&I5EFq_dX}Tc{7w;Z))0n3da@ z*5KndgGOAc?*`JR@d$TjGf69(#6OWsWz*UIY!B30VpQ-%Kdx~1;#p#8v=dl9B^8-4 zfk{KO*dv}Z<+iS+sYj-8&cc{{MW+whX9b&twD16mSh31S5b>uhpCcg)-B2^c?x~!_ z8lgUZpcn&B%Ss!8qjnEM z8q*##&O(uyf|ld@IjBfY-KaRO>!)3SIVNg>|GQmfo 
zJoz~y7=rqVR<&s-t8o2ZI1q@art5)bUA{`AK2{xXmNin^`C;)LH@;OJg_O#p6$~48 zr8uq5$RZ-`Or_DB)y_D{)SQJ`Z5$fx=u~OEK5Zw*0k!BY(wk*?9S*c$gwtCTZ(m7G zPfg91Fl9Ss1?{%O7nJ@oZ7^6r1}Ob!J+F!J+L)|56J-tV5)kxmNmQ{NmG3}x?ewr9 zwj?yba_zpe-c+pw^;R|%Y-BM7`!{^E${BN85?zdzg|5>PyL1eDF0s?|-;j#OM4UV@> zD#xpp$r{FK)4VaHoN0`+xA5g>wA0k+7C%YA&by7uq~isd=TC~ylxwF0&dYd{B2eti zsQ~B?LSVKi*Baw?`hdJ!VR$hJ9}Mvz%A6o9cL;ERAI29u;@5;}V2J0?S3;Bg$-pNh z3vr5FIALhX6`BO)kkC`|yh%F&rz096SB0q| zVW^;akZ`9aBTp-g$JmSF!z8M%?9{1AyxMs(EUYI6e667?XB!Q4kq!C|(?4wdtq==2 zcVLB~NINyvs0VbAwDd2fL>E?+)|>3;le{Wf{00g4e# zQ36=r8+vI16Bs5CpPe?Dwfeg9wg-bUbYryP8L8*@c6eF=La`&v?!)e*ZdO9U*kimD zjX_K(97DulG!B|JpFVc<#PMM>CZiMqO6o$jzH^74#tYLBCJO_!U@C&0qu5HD>ia42 zudFmtYwtGKh{AO3F|);K><7rL)J5NWUR8xEU{5(Rw{&$3c2ZYVA6HTZYp5B{2pQ!X zo2K;Jy~o+z2(>1_}Mzt>Ku@zkP z>>Pro>n1M)2jQZSl(1b4ooTvmbe4PonBalX=QT>BvmTTg7Uk5bTiZFw1|nF`+F5}; zRG>$oX^v@wAX^!BA41#`t4PC>-7DU@Lel#ALD(d23pLED1m>Ace#G+mDx zv`0u+m>ClPZ+O8Boj1S>lJNEL!W8+u27?96iV(Ej^0%?y!3}6`HU$(D6Lb z@l>O6@+=NKNa&KmUzCb0A=(OI2Jdwd?fNSXAnl;zT(5QC>mNcv($_<{ItcY_2jS=! zay6Pxz145{q!?su_lu1I`&*2qp3DV?ihNurbHTm?$wa8`2t#bH>#$Oo!SNJ3bHX&y z?tetGIe--_bboEHXYU+v#>!B?q&ptMDg!2Mdo?3YKt~{ASD?{eal(UZ5M+@DI5Qif z^op(&WU`8x9LE;+g@#tg#BJicbkm8=s_IR&{gL=gcLlLnp3V%H|-vbsi5=w55d|r+xF+1Bt2wY z6EwD08PO^_KX=H&fJx`v`4R(KYr7`TkjGkUx+GLM8oEM5+}AbW3RfO*ZWF4P)4KnS zj5w3_^$fWNhS(2v3_1Gk46M+jvcidmoXOeFBc@V=f(NZ`Qt+zvv4)*PFae;q)-9!E z(4e(TNV!rlXla697cp}=2tH(k7EgPo@}YI)jBc^`+(;Z%FEQL0_Cs^6J&V!&vBh8>^!+*RSk`v zZLB48oDAM2VP@Xz8i=rNn91r2oOVUBMw|EOG}$K_Ch=(K0mZhpIL0fk|Ur7X!OyY%AZ%j$O2(;Yn!4-5;}| z&a-wr^c5I}H}1m;z;e9+>){m5;B7^F1bYBHFeCOyv$+Bdw?n_2^PG;z9eR*baU3Y#_euz8@*w2~)Y^dubG@fA#l zIGV#P6|f5T+Lt!GS4&aS_P=G@$&xY4!WIZls^N%Ge_YQZgt^gTiPLa^c;*xk46YQ^ z)0!tg5ljsq5VoJNu%J%E!*e``s2o|{J{8lB5f##SSrV24sKAg8V} zA}-|VI#0&zbLPYwPJZ&ljvO0Hj6{i8sRSMl6qku`LT6wed(=FmGnsx2XM4|KZx9u= zgmTyiFO|4WF#}v!Jnz8EPfhXw;*u_VrDyjzbkUin-m@vJax&Js28VNMWeB{EgE}D| z?EQ&yeb!HUvrU-1a9me6c*aX#k5ZOo?`Ok|t_gFG#r3;^7_|*-(tR|UTd^RxIvm_aCq<>c_mZmWA6P4Oh`uJ-rGDk!u znGLed9p$_aNmp(S_k+2Y8!(W5C;Xrjk9sU=jb>%HwlnRjcM5S*Z&uRd1|T=C!77)7 z^QQ?v#lFCX0?uw=`|cDxao-2yuf9yRr?}#mSc0Lcq`wO4LfyhJ5=l9+~>gIj1r7!28(T)N?{ss%QQ(SSqc7cNRNJoL6ATd5n=c9DKb!quz zTj|g|07?qeouRy7pS%iPrO!^ci|gvX*T|Z*f3>U`|KuCXnpA$ZtQr5dHGK`geHs`oYtMdv z04fW%@rR)Scn5hG=w)VX51QZ;b?w_t# zMJvWVnsU>DfQND=fi&wPcKBjWmVEXA@Dqtj!4ytuthxYx*UVnj3_gt&L z3t^xA2FuNd<#NcpW{Ka85|MzLiQhEn&O>HNlG>So0zL7WfkL*3*f4J_xxRr}}YZ5;7zn7&elvO$R|CTskvX zTCs&#Zkw-)Bl&_;?so=}!Uf6*%Gk^vtHY7(eMTpGMlWr2QPz z9*>>-+~YVOuYC-J{6c&_gEu}KHlM9$RenCB`sTAz*R1~9To(0a4YK-`xy*cOW}BDc z8WQ;y&RFw3^Xa)Be2naBT%AdBNa3C4`%?T|5@lF(DH&+AxAnJnox&0icVD)<-fYb& z2$7@poLnxCZY}JRTMIjicNKAy7?$>Kiw`?vH9JGW;l#`t${pmzJ~RU%7jZEX1b$57 zNb1Kf#wK|Grw3tabQrj0At4vSDEl5#lo_$b46(3qx8{+n|e= zT7)&$H2g_c(?192Z^)CBDKU)oEY3>vpO>pYI9k3|XxDLBOmaPh3OLPEtxa$XQ`5Nb zjLRV$v<2MTFBif}Iym*S3Cw3ex%dq$f5Y_KmLWf$dnH~N z%KC|BqiMt+NTaZF7s3b$^v|LJehn9_rGfTtEUVC2&Gt zY!Mh834JdnBC;Xgi>b@7ya{_HM}%!4|8d~s>>=EnPGBN2r}cjond@JHvRtK1p*tbU}=%`4mj<>tJ%{J&d9;xnD>>fh#wsVWLNqAvhKiu)pF8&dkko z38ddNpX7*S>cxb~r;2gNQFiVS#0AD+3B)q1{FE6rVrb{?f}fslV$3+SbYIZ;!~(8E zmO!bD@#qjx*kyptXuR$ytO6v7GPg20B`l$kzp7+Vf-H5B)RJ+R$UzsOc|L0mfNGn- z2Ko<>L!^xWQz#`=GOLscUaez0pntp<_d~I2gbMV#Ecf^5{5~86oWaY_VEf6#0Y=Gm z8P?KzLT;3s?jF`odWjz%_#OM^P5sL!%y7D9g<@xtLb0Am8X0GKx!*hTD>z#n}lb8Te&3#HZ_0=8Ffd z8$?BfS+#63jxM}LK@lHv4alNI)^m4-7h0f)#^qa_)?`W-J#f-R$Z1Zm7dfzYT*NR_ ziQTa}{PW3+m=?i_xa^I#Vwiz|G?Xycmq{wWzf@}I>bM*W#?}eOm1uH#*gAMg4rkl( z36J*^6DQB*O9loNDxX+!*D)%`DA+CmrOxaLJA5o4QIz=A2|0MtCJZJ_OJo5R;9=dGD_BDMQP4Nc7%IlF^$ zb`dI_k}pjPoG`G5i(`uEnc)CfP>;no`5b(C#15y(z? 
zQajkJl#gj(Zs-1*pOh;LzXi~!1k?s(D$@rEaZ5r@Wim$@9y0#Ok9Lm?%JVp2ff=8L z;qaoAi6Bh*Q3o3C{2e42WlwDYK#fU8kWBWR^Em6_j?s0p`@CH61QSTe5Rx}@Gyy?j z!W?~FIXo5dhL!Kre~T2aq^7;G?f2@R2E1%U4kOUL7mjG9&{*|4=$v9%;n-Y~NEb228oo_7pV3r)2$AC;*E-VwhkF@5pawM}8q@XI^5B>d4LbCBKfS)_PvU zgxQgIZ1~9@)V*-)X7^S~3el^3cmQA8q(yP=?P#U`HBeD=Xk%1=ittzHe3?!*a!ijo zb=f#KsVV-AF4RkH8Y-Wq{}BmR?yJ*?F@5D(WBA^Ud!2_bN>iN+d&NuX`=j2xAHA98 z+|Ttx{c10xPe~k7>QjizhH;|dSgjup`_h%36mtLn>Axim(C#wIEqpAGjMm1vV&4Tz zBK`O5f`6bB^%K80vqL&kZ$^ZlHaWj$_vM1_1E)RDzN5&9_4eZ%Ak@q_w)HCrS-Jnd z!LjyZ-TlM!E)h@u`}-*xbSn(7a6FoOmaj3$+8rRH^o@2>IKWH?{WFMX`YY!kBJ)EEp=2yd zmXCIZ?ZjcA=$iOhNT#5pPw_BNli#n4QcD3l<y)rC z05IdK8N-`h1XRj6#UD1aek+=5DjMXwWvIBU^G>4QWavSP;j3WWi3#__%!CJf29(N( z_A<~hOLGmmR3K+}*UD3)s=Vh*U<=NfvDWsceDrU{hIAaW3DRML_$TH&1IN!j#*ZJmDqt!8NZ^*5^UI+ujig_c&0<*6Kz;AL-UDA1uTe9^?x zf5-ee)n5_*sC-t4-nC8nwEK8lYTVH#OlC{l=-WXE#Wa>BygDMn z0AIl?&$!i=V`q--JALwqeNg|56vLaXc8_SXa)9QuOqgwAHxx^wDO}dYipF!fAn?=X zF1&nb&`;ntir@DL>OWR%Tri!)b+Y-|%mzX?F{_+8hAMgA&d^9j6P522ptL# zvzDhtz?`((M~GV)avaax*14|h){vaK>-=_-o&vfZA&FscIZ~ZSg)pb!c<6Gp2GMS4 zqnT6B%kgMOv2Tv>+DCzxqn-n*9E+4Qfp$&~A(;bx0nC$ucAo@=X9I!OKGoN6U<_?J z9~hzr=lb+BEMNhS-!p>E3ez?3X5f41T%z*X4DFR& zTe-2$F#WyE@I!EHIZR^)(3=^^6DxjZKS$J&NA&kG=m+ThAe}GL`9nGsr?Rsvd!e#1 zslS_HpP@5EXM_%Sb@Z3${3M;P()n#T{_4k1>^pw^;EB?{lP8ZH9yxY!{kcIH0Ut)W;dDG;McX#}-9Ki@KiOJtard_pC)4 z_pc!=VZ4Q{EBMneo21@12q-OogjnXISTx`gJC_(p(sDOo^(V6U3}hDHa2U91VC~?l zfsF%uvWbBVkK6LkFOY=qb?}*WBG-y0oTXkZb3hJnhF|G#>SyMQZXE8>@iC!cd@_-CJ`12~=FuI~VQHH4CBRxPO`k{nMaaR3p?YpUjibn44@L5ZnBb zV*;`APsjVM!TdnpN@Wx4;#Mk$bhH!a?^?sOnY^_ogUy3H?RbNzS2~g3kRKdaJ+NkA Wb$%_|f>M+CC|?ScL|wQ|@qYmo4r(s| literal 0 HcmV?d00001 diff --git a/bin/splunklib/binding.py b/bin/splunklib/binding.py new file mode 100644 index 0000000..c3121fb --- /dev/null +++ b/bin/splunklib/binding.py @@ -0,0 +1,1415 @@ +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The **splunklib.binding** module provides a low-level binding interface to the +`Splunk REST API `_. + +This module handles the wire details of calling the REST API, such as +authentication tokens, prefix paths, URL encoding, and so on. Actual path +segments, ``GET`` and ``POST`` arguments, and the parsing of responses is left +to the user. + +If you want a friendlier interface to the Splunk REST API, use the +:mod:`splunklib.client` module. 
+""" + +from __future__ import absolute_import + +import io +import logging +import socket +import ssl +import sys +from base64 import b64encode +from contextlib import contextmanager +from datetime import datetime +from functools import wraps +from io import BytesIO +from xml.etree.ElementTree import XML + +from splunklib import six +from splunklib.six import StringIO +from splunklib.six.moves import urllib + +from .data import record + +try: + from xml.etree.ElementTree import ParseError +except ImportError as e: + from xml.parsers.expat import ExpatError as ParseError + + +__all__ = [ + "AuthenticationError", + "connect", + "Context", + "handler", + "HTTPError" +] + +# If you change these, update the docstring +# on _authority as well. +DEFAULT_HOST = "localhost" +DEFAULT_PORT = "8089" +DEFAULT_SCHEME = "https" + +def _log_duration(f): + @wraps(f) + def new_f(*args, **kwargs): + start_time = datetime.now() + val = f(*args, **kwargs) + end_time = datetime.now() + logging.debug("Operation took %s", end_time-start_time) + return val + return new_f + + +def _parse_cookies(cookie_str, dictionary): + """Tries to parse any key-value pairs of cookies in a string, + then updates the the dictionary with any key-value pairs found. + + **Example**:: + + dictionary = {} + _parse_cookies('my=value', dictionary) + # Now the following is True + dictionary['my'] == 'value' + + :param cookie_str: A string containing "key=value" pairs from an HTTP "Set-Cookie" header. + :type cookie_str: ``str`` + :param dictionary: A dictionary to update with any found key-value pairs. + :type dictionary: ``dict`` + """ + parsed_cookie = six.moves.http_cookies.SimpleCookie(cookie_str) + for cookie in parsed_cookie.values(): + dictionary[cookie.key] = cookie.coded_value + + +def _make_cookie_header(cookies): + """ + Takes a list of 2-tuples of key-value pairs of + cookies, and returns a valid HTTP ``Cookie`` + header. + + **Example**:: + + header = _make_cookie_header([("key", "value"), ("key_2", "value_2")]) + # Now the following is True + header == "key=value; key_2=value_2" + + :param cookies: A list of 2-tuples of cookie key-value pairs. + :type cookies: ``list`` of 2-tuples + :return: ``str` An HTTP header cookie string. + :rtype: ``str`` + """ + return "; ".join("%s=%s" % (key, value) for key, value in cookies) + +# Singleton values to eschew None +class _NoAuthenticationToken(object): + """The value stored in a :class:`Context` or :class:`splunklib.client.Service` + class that is not logged in. + + If a ``Context`` or ``Service`` object is created without an authentication + token, and there has not yet been a call to the ``login`` method, the token + field of the ``Context`` or ``Service`` object is set to + ``_NoAuthenticationToken``. + + Likewise, after a ``Context`` or ``Service`` object has been logged out, the + token is set to this value again. + """ + pass + + +class UrlEncoded(str): + """This class marks URL-encoded strings. + It should be considered an SDK-private implementation detail. + + Manually tracking whether strings are URL encoded can be difficult. Avoid + calling ``urllib.quote`` to replace special characters with escapes. When + you receive a URL-encoded string, *do* use ``urllib.unquote`` to replace + escapes with single characters. Then, wrap any string you want to use as a + URL in ``UrlEncoded``. Note that because the ``UrlEncoded`` class is + idempotent, making multiple calls to it is OK. 
+ + ``UrlEncoded`` objects are identical to ``str`` objects (including being + equal if their contents are equal) except when passed to ``UrlEncoded`` + again. + + ``UrlEncoded`` removes the ``str`` type support for interpolating values + with ``%`` (doing that raises a ``TypeError``). There is no reliable way to + encode values this way, so instead, interpolate into a string, quoting by + hand, and call ``UrlEncode`` with ``skip_encode=True``. + + **Example**:: + + import urllib + UrlEncoded('%s://%s' % (scheme, urllib.quote(host)), skip_encode=True) + + If you append ``str`` strings and ``UrlEncoded`` strings, the result is also + URL encoded. + + **Example**:: + + UrlEncoded('ab c') + 'de f' == UrlEncoded('ab cde f') + 'ab c' + UrlEncoded('de f') == UrlEncoded('ab cde f') + """ + def __new__(self, val='', skip_encode=False, encode_slash=False): + if isinstance(val, UrlEncoded): + # Don't urllib.quote something already URL encoded. + return val + elif skip_encode: + return str.__new__(self, val) + elif encode_slash: + return str.__new__(self, urllib.parse.quote_plus(val)) + else: + # When subclassing str, just call str's __new__ method + # with your class and the value you want to have in the + # new string. + return str.__new__(self, urllib.parse.quote(val)) + + def __add__(self, other): + """self + other + + If *other* is not a ``UrlEncoded``, URL encode it before + adding it. + """ + if isinstance(other, UrlEncoded): + return UrlEncoded(str.__add__(self, other), skip_encode=True) + else: + return UrlEncoded(str.__add__(self, urllib.parse.quote(other)), skip_encode=True) + + def __radd__(self, other): + """other + self + + If *other* is not a ``UrlEncoded``, URL _encode it before + adding it. + """ + if isinstance(other, UrlEncoded): + return UrlEncoded(str.__radd__(self, other), skip_encode=True) + else: + return UrlEncoded(str.__add__(urllib.parse.quote(other), self), skip_encode=True) + + def __mod__(self, fields): + """Interpolation into ``UrlEncoded``s is disabled. + + If you try to write ``UrlEncoded("%s") % "abc", will get a + ``TypeError``. + """ + raise TypeError("Cannot interpolate into a UrlEncoded object.") + def __repr__(self): + return "UrlEncoded(%s)" % repr(urllib.parse.unquote(str(self))) + +@contextmanager +def _handle_auth_error(msg): + """Handle reraising HTTP authentication errors as something clearer. + + If an ``HTTPError`` is raised with status 401 (access denied) in + the body of this context manager, reraise it as an + ``AuthenticationError`` instead, with *msg* as its message. + + This function adds no round trips to the server. + + :param msg: The message to be raised in ``AuthenticationError``. + :type msg: ``str`` + + **Example**:: + + with _handle_auth_error("Your login failed."): + ... # make an HTTP request + """ + try: + yield + except HTTPError as he: + if he.status == 401: + raise AuthenticationError(msg, he) + else: + raise + +def _authentication(request_fun): + """Decorator to handle autologin and authentication errors. + + *request_fun* is a function taking no arguments that needs to + be run with this ``Context`` logged into Splunk. + + ``_authentication``'s behavior depends on whether the + ``autologin`` field of ``Context`` is set to ``True`` or + ``False``. If it's ``False``, then ``_authentication`` + aborts if the ``Context`` is not logged in, and raises an + ``AuthenticationError`` if an ``HTTPError`` of status 401 is + raised in *request_fun*. 
If it's ``True``, then + ``_authentication`` will try at all sensible places to + log in before issuing the request. + + If ``autologin`` is ``False``, ``_authentication`` makes + one roundtrip to the server if the ``Context`` is logged in, + or zero if it is not. If ``autologin`` is ``True``, it's less + deterministic, and may make at most three roundtrips (though + that would be a truly pathological case). + + :param request_fun: A function of no arguments encapsulating + the request to make to the server. + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(..., autologin=True) + c.logout() + def f(): + c.get("/services") + return 42 + print _authentication(f) + """ + @wraps(request_fun) + def wrapper(self, *args, **kwargs): + if self.token is _NoAuthenticationToken and \ + not self.has_cookies(): + # Not yet logged in. + if self.autologin and self.username and self.password: + # This will throw an uncaught + # AuthenticationError if it fails. + self.login() + else: + # Try the request anyway without authentication. + # Most requests will fail. Some will succeed, such as + # 'GET server/info'. + with _handle_auth_error("Request aborted: not logged in."): + return request_fun(self, *args, **kwargs) + try: + # Issue the request + return request_fun(self, *args, **kwargs) + except HTTPError as he: + if he.status == 401 and self.autologin: + # Authentication failed. Try logging in, and then + # rerunning the request. If either step fails, throw + # an AuthenticationError and give up. + with _handle_auth_error("Autologin failed."): + self.login() + with _handle_auth_error( + "Autologin succeeded, but there was an auth error on " + "next request. Something is very wrong."): + return request_fun(self, *args, **kwargs) + elif he.status == 401 and not self.autologin: + raise AuthenticationError( + "Request failed: Session is not logged in.", he) + else: + raise + + return wrapper + + +def _authority(scheme=DEFAULT_SCHEME, host=DEFAULT_HOST, port=DEFAULT_PORT): + """Construct a URL authority from the given *scheme*, *host*, and *port*. + + Named in accordance with RFC2396_, which defines URLs as:: + + ://? + + .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt + + So ``https://localhost:8000/a/b/b?boris=hilda`` would be parsed as:: + + scheme := https + authority := localhost:8000 + path := /a/b/c + query := boris=hilda + + :param scheme: URL scheme (the default is "https") + :type scheme: "http" or "https" + :param host: The host name (the default is "localhost") + :type host: string + :param port: The port number (the default is 8089) + :type port: integer + :return: The URL authority. + :rtype: UrlEncoded (subclass of ``str``) + + **Example**:: + + _authority() == "https://localhost:8089" + + _authority(host="splunk.utopia.net") == "https://splunk.utopia.net:8089" + + _authority(host="2001:0db8:85a3:0000:0000:8a2e:0370:7334") == \ + "https://[2001:0db8:85a3:0000:0000:8a2e:0370:7334]:8089" + + _authority(scheme="http", host="splunk.utopia.net", port="471") == \ + "http://splunk.utopia.net:471" + + """ + if ':' in host: + # IPv6 addresses must be enclosed in [ ] in order to be well + # formed. + host = '[' + host + ']' + return UrlEncoded("%s://%s:%s" % (scheme, host, port), skip_encode=True) + +# kwargs: sharing, owner, app +def namespace(sharing=None, owner=None, app=None, **kwargs): + """This function constructs a Splunk namespace. + + Every Splunk resource belongs to a namespace. 
The namespace is specified by + the pair of values ``owner`` and ``app`` and is governed by a ``sharing`` mode. + The possible values for ``sharing`` are: "user", "app", "global" and "system", + which map to the following combinations of ``owner`` and ``app`` values: + + "user" => {owner}, {app} + + "app" => nobody, {app} + + "global" => nobody, {app} + + "system" => nobody, system + + "nobody" is a special user name that basically means no user, and "system" + is the name reserved for system resources. + + "-" is a wildcard that can be used for both ``owner`` and ``app`` values and + refers to all users and all apps, respectively. + + In general, when you specify a namespace you can specify any combination of + these three values and the library will reconcile the triple, overriding the + provided values as appropriate. + + Finally, if no namespacing is specified the library will make use of the + ``/services`` branch of the REST API, which provides a namespaced view of + Splunk resources equivelent to using ``owner={currentUser}`` and + ``app={defaultApp}``. + + The ``namespace`` function returns a representation of the namespace from + reconciling the values you provide. It ignores any keyword arguments other + than ``owner``, ``app``, and ``sharing``, so you can provide ``dicts`` of + configuration information without first having to extract individual keys. + + :param sharing: The sharing mode (the default is "user"). + :type sharing: "system", "global", "app", or "user" + :param owner: The owner context (the default is "None"). + :type owner: ``string`` + :param app: The app context (the default is "None"). + :type app: ``string`` + :returns: A :class:`splunklib.data.Record` containing the reconciled + namespace. + + **Example**:: + + import splunklib.binding as binding + n = binding.namespace(sharing="user", owner="boris", app="search") + n = binding.namespace(sharing="global", app="search") + """ + if sharing in ["system"]: + return record({'sharing': sharing, 'owner': "nobody", 'app': "system" }) + if sharing in ["global", "app"]: + return record({'sharing': sharing, 'owner': "nobody", 'app': app}) + if sharing in ["user", None]: + return record({'sharing': sharing, 'owner': owner, 'app': app}) + raise ValueError("Invalid value for argument: 'sharing'") + + +class Context(object): + """This class represents a context that encapsulates a splunkd connection. + + The ``Context`` class encapsulates the details of HTTP requests, + authentication, a default namespace, and URL prefixes to simplify access to + the REST API. + + After creating a ``Context`` object, you must call its :meth:`login` + method before you can issue requests to splunkd. Or, use the :func:`connect` + function to create an already-authenticated ``Context`` object. You can + provide a session token explicitly (the same token can be shared by multiple + ``Context`` objects) to provide authentication. + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). + :type scheme: "https" or "http" + :param verify: Enable (True) or disable (False) SSL verrification for https connections. + :type verify: ``Boolean`` + :param sharing: The sharing mode for the namespace (the default is "user"). + :type sharing: "global", "system", "app", or "user" + :param owner: The owner context of the namespace (optional, the default is "None"). 
+ :type owner: ``string`` + :param app: The app context of the namespace (optional, the default is "None"). + :type app: ``string`` + :param token: A session token. When provided, you don't need to call :meth:`login`. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param username: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param password: The password for the Splunk account. + :type password: ``string`` + :param splunkToken: Splunk authentication token + :type splunkToken: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param handler: The HTTP request handler (optional). + :returns: A ``Context`` instance. + + **Example**:: + + import splunklib.binding as binding + c = binding.Context(username="boris", password="natasha", ...) + c.login() + # Or equivalently + c = binding.connect(username="boris", password="natasha") + # Or if you already have a session token + c = binding.Context(token="atg232342aa34324a") + # Or if you already have a valid cookie + c = binding.Context(cookie="splunkd_8089=...") + """ + def __init__(self, handler=None, **kwargs): + self.http = HttpLib(handler, kwargs.get("verify", False), key_file=kwargs.get("key_file"), + cert_file=kwargs.get("cert_file")) # Default to False for backward compat + self.token = kwargs.get("token", _NoAuthenticationToken) + if self.token is None: # In case someone explicitly passes token=None + self.token = _NoAuthenticationToken + self.scheme = kwargs.get("scheme", DEFAULT_SCHEME) + self.host = kwargs.get("host", DEFAULT_HOST) + self.port = int(kwargs.get("port", DEFAULT_PORT)) + self.authority = _authority(self.scheme, self.host, self.port) + self.namespace = namespace(**kwargs) + self.username = kwargs.get("username", "") + self.password = kwargs.get("password", "") + self.basic = kwargs.get("basic", False) + self.bearerToken = kwargs.get("splunkToken", "") + self.autologin = kwargs.get("autologin", False) + self.additional_headers = kwargs.get("headers", []) + + # Store any cookies in the self.http._cookies dict + if "cookie" in kwargs and kwargs['cookie'] not in [None, _NoAuthenticationToken]: + _parse_cookies(kwargs["cookie"], self.http._cookies) + + def get_cookies(self): + """Gets the dictionary of cookies from the ``HttpLib`` member of this instance. + + :return: Dictionary of cookies stored on the ``self.http``. + :rtype: ``dict`` + """ + return self.http._cookies + + def has_cookies(self): + """Returns true if the ``HttpLib`` member of this instance has at least + one cookie stored. + + :return: ``True`` if there is at least one cookie, else ``False`` + :rtype: ``bool`` + """ + return len(self.get_cookies()) > 0 + + # Shared per-context request headers + @property + def _auth_headers(self): + """Headers required to authenticate a request. + + Assumes your ``Context`` already has a authentication token or + cookie, either provided explicitly or obtained by logging + into the Splunk instance. 
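The constructor parameters above support several independent credential styles. A minimal sketch of how each is typically supplied (the token and cookie values are placeholders; the header-selection logic appears in ``_auth_headers`` just below):

```
import splunklib.binding as binding

# Session token: sent as "Authorization: Splunk <token>".
c = binding.Context(token="atg232342aa34324a")

# HTTP Basic auth: username/password are sent on every request.
c = binding.Context(username="boris", password="natasha", basic=True)

# Splunk authentication token: sent as "Authorization: Bearer <token>".
c = binding.Context(splunkToken="eyJraWQi...")  # placeholder value

# Existing session cookie: login() becomes a no-op.
c = binding.Context(cookie="splunkd_8089=...")
```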
+ + :returns: A list of 2-tuples containing key and value + """ + if self.has_cookies(): + return [("Cookie", _make_cookie_header(list(self.get_cookies().items())))] + elif self.basic and (self.username and self.password): + token = 'Basic %s' % b64encode(("%s:%s" % (self.username, self.password)).encode('utf-8')).decode('ascii') + return [("Authorization", token)] + elif self.bearerToken: + token = 'Bearer %s' % self.bearerToken + return [("Authorization", token)] + elif self.token is _NoAuthenticationToken: + return [] + else: + # Ensure the token is properly formatted + if self.token.startswith('Splunk '): + token = self.token + else: + token = 'Splunk %s' % self.token + return [("Authorization", token)] + + def connect(self): + """Returns an open connection (socket) to the Splunk instance. + + This method is used for writing bulk events to an index or similar tasks + where the overhead of opening a connection multiple times would be + prohibitive. + + :returns: A socket. + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(...) + socket = c.connect() + socket.write("POST %s HTTP/1.1\\r\\n" % "some/path/to/post/to") + socket.write("Host: %s:%s\\r\\n" % (c.host, c.port)) + socket.write("Accept-Encoding: identity\\r\\n") + socket.write("Authorization: %s\\r\\n" % c.token) + socket.write("X-Splunk-Input-Mode: Streaming\\r\\n") + socket.write("\\r\\n") + """ + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + if self.scheme == "https": + sock = ssl.wrap_socket(sock) + sock.connect((socket.gethostbyname(self.host), self.port)) + return sock + + @_authentication + @_log_duration + def delete(self, path_segment, owner=None, app=None, sharing=None, **query): + """Performs a DELETE operation at the REST path segment with the given + namespace and query. + + This method is named to match the HTTP method. ``delete`` makes at least + one round trip to the server, one additional round trip for each 303 + status returned, and at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method uses the + default :class:`Context` namespace. All other keyword arguments are + included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Context`` object is not + logged in. + :raises HTTPError: Raised when an error occurred in a GET operation from + *path_segment*. + :param path_segment: A REST path segment. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode of the namespace (optional). + :type sharing: ``string`` + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + c = binding.connect(...) 
+ c.delete('saved/searches/boris') == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '1786'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:53:06 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + c.delete('nonexistant/path') # raises HTTPError + c.logout() + c.delete('apps/local') # raises AuthenticationError + """ + path = self.authority + self._abspath(path_segment, owner=owner, + app=app, sharing=sharing) + logging.debug("DELETE request to %s (body: %s)", path, repr(query)) + response = self.http.delete(path, self._auth_headers, **query) + return response + + @_authentication + @_log_duration + def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, **query): + """Performs a GET operation from the REST path segment with the given + namespace and query. + + This method is named to match the HTTP method. ``get`` makes at least + one round trip to the server, one additional round trip for each 303 + status returned, and at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method uses the + default :class:`Context` namespace. All other keyword arguments are + included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Context`` object is not + logged in. + :raises HTTPError: Raised when an error occurred in a GET operation from + *path_segment*. + :param path_segment: A REST path segment. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param sharing: The sharing mode of the namespace (optional). + :type sharing: ``string`` + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + c = binding.connect(...) + c.get('apps/local') == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '26208'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:30:35 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + c.get('nonexistant/path') # raises HTTPError + c.logout() + c.get('apps/local') # raises AuthenticationError + """ + if headers is None: + headers = [] + + path = self.authority + self._abspath(path_segment, owner=owner, + app=app, sharing=sharing) + logging.debug("GET request to %s (body: %s)", path, repr(query)) + all_headers = headers + self.additional_headers + self._auth_headers + response = self.http.get(path, all_headers, **query) + return response + + @_authentication + @_log_duration + def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, **query): + """Performs a POST operation from the REST path segment with the given + namespace and query. + + This method is named to match the HTTP method. 
``post`` makes at least + one round trip to the server, one additional round trip for each 303 + status returned, and at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method uses the + default :class:`Context` namespace. All other keyword arguments are + included in the URL as query parameters. + + Some of Splunk's endpoints, such as ``receivers/simple`` and + ``receivers/stream``, require unstructured data in the POST body + and all metadata passed as GET-style arguments. If you provide + a ``body`` argument to ``post``, it will be used as the POST + body, and all other keyword arguments will be passed as + GET-style arguments in the URL. + + :raises AuthenticationError: Raised when the ``Context`` object is not + logged in. + :raises HTTPError: Raised when an error occurred in a GET operation from + *path_segment*. + :param path_segment: A REST path segment. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode of the namespace (optional). + :type sharing: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param query: All other keyword arguments, which are used as query + parameters. + :param body: Parameters to be used in the post body. If specified, + any parameters in the query will be applied to the URL instead of + the body. If a dict is supplied, the key-value pairs will be form + encoded. If a string is supplied, the body will be passed through + in the request unchanged. + :type body: ``dict`` or ``str`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + c = binding.connect(...) + c.post('saved/searches', name='boris', + search='search * earliest=-1m | head 1') == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '10455'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:46:06 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'Created', + 'status': 201} + c.post('nonexistant/path') # raises HTTPError + c.logout() + # raises AuthenticationError: + c.post('saved/searches', name='boris', + search='search * earliest=-1m | head 1') + """ + if headers is None: + headers = [] + + path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) + logging.debug("POST request to %s (body: %s)", path, repr(query)) + all_headers = headers + self.additional_headers + self._auth_headers + response = self.http.post(path, all_headers, **query) + return response + + @_authentication + @_log_duration + def request(self, path_segment, method="GET", headers=None, body="", + owner=None, app=None, sharing=None): + """Issues an arbitrary HTTP request to the REST path segment. + + This method is named to match ``httplib.request``. This function + makes a single round trip to the server. + + If *owner*, *app*, and *sharing* are omitted, this method uses the + default :class:`Context` namespace. All other keyword arguments are + included in the URL as query parameters. 
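A hedged sketch of the two POST styles described above (it assumes an authenticated ``Context`` against a reachable splunkd; the receiver endpoint and metadata parameters mirror the docstring but are illustrative only):

```
import splunklib.binding as binding

c = binding.connect(username="boris", password="natasha")

# Form-encoded POST: keyword arguments become the request body.
c.post('saved/searches', name='boris',
       search='search * earliest=-1m | head 1')

# Raw-body POST: `body` is sent unchanged and the remaining keyword
# arguments are appended to the URL as GET-style query parameters.
c.post('receivers/simple', body='my raw event text',
       index='main', sourcetype='demo')
```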
+ + :raises AuthenticationError: Raised when the ``Context`` object is not + logged in. + :raises HTTPError: Raised when an error occurred in a GET operation from + *path_segment*. + :param path_segment: A REST path segment. + :type path_segment: ``string`` + :param method: The HTTP method to use (optional). + :type method: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param body: Content of the HTTP request (optional). + :type body: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode of the namespace (optional). + :type sharing: ``string`` + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + c = binding.connect(...) + c.request('saved/searches', method='GET') == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '46722'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 17:24:19 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + c.request('nonexistant/path', method='GET') # raises HTTPError + c.logout() + c.get('apps/local') # raises AuthenticationError + """ + if headers is None: + headers = [] + + path = self.authority \ + + self._abspath(path_segment, owner=owner, + app=app, sharing=sharing) + all_headers = headers + self.additional_headers + self._auth_headers + logging.debug("%s request to %s (headers: %s, body: %s)", + method, path, str(all_headers), repr(body)) + response = self.http.request(path, + {'method': method, + 'headers': all_headers, + 'body': body}) + return response + + def login(self): + """Logs into the Splunk instance referred to by the :class:`Context` + object. + + Unless a ``Context`` is created with an explicit authentication token + (probably obtained by logging in from a different ``Context`` object) + you must call :meth:`login` before you can issue requests. + The authentication token obtained from the server is stored in the + ``token`` field of the ``Context`` object. + + :raises AuthenticationError: Raised when login fails. + :returns: The ``Context`` object, so you can chain calls. + + **Example**:: + + import splunklib.binding as binding + c = binding.Context(...).login() + # Then issue requests... + """ + + if self.has_cookies() and \ + (not self.username and not self.password): + # If we were passed session cookie(s), but no username or + # password, then login is a nop, since we're automatically + # logged in. + return + + if self.token is not _NoAuthenticationToken and \ + (not self.username and not self.password): + # If we were passed a session token, but no username or + # password, then login is a nop, since we're automatically + # logged in. + return + + if self.basic and (self.username and self.password): + # Basic auth mode requested, so this method is a nop as long + # as credentials were passed in. + return + + if self.bearerToken: + # Bearer auth mode requested, so this method is a nop as long + # as authentication token was passed in. 
+ return + # Only try to get a token and updated cookie if username & password are specified + try: + response = self.http.post( + self.authority + self._abspath("/services/auth/login"), + username=self.username, + password=self.password, + headers=self.additional_headers, + cookie="1") # In Splunk 6.2+, passing "cookie=1" will return the "set-cookie" header + + body = response.body.read() + session = XML(body).findtext("./sessionKey") + self.token = "Splunk %s" % session + return self + except HTTPError as he: + if he.status == 401: + raise AuthenticationError("Login failed.", he) + else: + raise + + def logout(self): + """Forgets the current session token, and cookies.""" + self.token = _NoAuthenticationToken + self.http._cookies = {} + return self + + def _abspath(self, path_segment, + owner=None, app=None, sharing=None): + """Qualifies *path_segment* into an absolute path for a URL. + + If *path_segment* is already absolute, returns it unchanged. + If *path_segment* is relative, then qualifies it with either + the provided namespace arguments or the ``Context``'s default + namespace. Any forbidden characters in *path_segment* are URL + encoded. This function has no network activity. + + Named to be consistent with RFC2396_. + + .. _RFC2396: http://www.ietf.org/rfc/rfc2396.txt + + :param path_segment: A relative or absolute URL path segment. + :type path_segment: ``string`` + :param owner, app, sharing: Components of a namespace (defaults + to the ``Context``'s namespace if all + three are omitted) + :type owner, app, sharing: ``string`` + :return: A ``UrlEncoded`` (a subclass of ``str``). + :rtype: ``string`` + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(owner='boris', app='search', sharing='user') + c._abspath('/a/b/c') == '/a/b/c' + c._abspath('/a/b c/d') == '/a/b%20c/d' + c._abspath('apps/local/search') == \ + '/servicesNS/boris/search/apps/local/search' + c._abspath('apps/local/search', sharing='system') == \ + '/servicesNS/nobody/system/apps/local/search' + url = c.authority + c._abspath('apps/local/sharing') + """ + skip_encode = isinstance(path_segment, UrlEncoded) + # If path_segment is absolute, escape all forbidden characters + # in it and return it. + if path_segment.startswith('/'): + return UrlEncoded(path_segment, skip_encode=skip_encode) + + # path_segment is relative, so we need a namespace to build an + # absolute path. + if owner or app or sharing: + ns = namespace(owner=owner, app=app, sharing=sharing) + else: + ns = self.namespace + + # If no app or owner are specified, then use the /services + # endpoint. Otherwise, use /servicesNS with the specified + # namespace. If only one of app and owner is specified, use + # '-' for the other. + if ns.app is None and ns.owner is None: + return UrlEncoded("/services/%s" % path_segment, skip_encode=skip_encode) + + oname = "nobody" if ns.owner is None else ns.owner + aname = "system" if ns.app is None else ns.app + path = UrlEncoded("/servicesNS/%s/%s/%s" % (oname, aname, path_segment), + skip_encode=skip_encode) + return path + + +def connect(**kwargs): + """This function returns an authenticated :class:`Context` object. + + This function is a shorthand for calling :meth:`Context.login`. + + This function makes one round trip to the server. + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). 
+ :type scheme: "https" or "http" + :param owner: The owner context of the namespace (the default is "None"). + :type owner: ``string`` + :param app: The app context of the namespace (the default is "None"). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (the default is "user"). + :type sharing: "global", "system", "app", or "user" + :param token: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param username: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param password: The password for the Splunk account. + :type password: ``string`` + :param headers: List of extra HTTP headers to send (optional). + :type headers: ``list`` of 2-tuples. + :param autologin: When ``True``, automatically tries to log in again if the + session terminates. + :type autologin: ``Boolean`` + :return: An initialized :class:`Context` instance. + + **Example**:: + + import splunklib.binding as binding + c = binding.connect(...) + response = c.get("apps/local") + """ + c = Context(**kwargs) + c.login() + return c + +# Note: the error response schema supports multiple messages but we only +# return the first, although we do return the body so that an exception +# handler that wants to read multiple messages can do so. +class HTTPError(Exception): + """This exception is raised for HTTP responses that return an error.""" + def __init__(self, response, _message=None): + status = response.status + reason = response.reason + body = response.body.read() + try: + detail = XML(body).findtext("./messages/msg") + except ParseError as err: + detail = body + message = "HTTP %d %s%s" % ( + status, reason, "" if detail is None else " -- %s" % detail) + Exception.__init__(self, _message or message) + self.status = status + self.reason = reason + self.headers = response.headers + self.body = body + self._response = response + +class AuthenticationError(HTTPError): + """Raised when a login request to Splunk fails. + + If your username was unknown or you provided an incorrect password + in a call to :meth:`Context.login` or :meth:`splunklib.client.Service.login`, + this exception is raised. + """ + def __init__(self, message, cause): + # Put the body back in the response so that HTTPError's constructor can + # read it again. + cause._response.body = BytesIO(cause.body) + + HTTPError.__init__(self, cause._response, message) + +# +# The HTTP interface used by the Splunk binding layer abstracts the underlying +# HTTP library using request & response 'messages' which are implemented as +# dictionaries with the following structure: +# +# # HTTP request message (only method required) +# request { +# method : str, +# headers? : [(str, str)*], +# body? : str, +# } +# +# # HTTP response message (all keys present) +# response { +# status : int, +# reason : str, +# headers : [(str, str)*], +# body : file, +# } +# + +# Encode the given kwargs as a query string. This wrapper will also _encode +# a list value as a sequence of assignemnts to the corresponding arg name, +# for example an argument such as 'foo=[1,2,3]' will be encoded as +# 'foo=1&foo=2&foo=3'. 
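For example (an editorial illustration of the list handling described in the comment above; ``_encode`` is an internal helper, shown here only to make the encoding concrete):

```
from splunklib.binding import _encode

# A list expands into repeated assignments of the same argument name.
assert _encode(foo=[1, 2, 3]) == 'foo=1&foo=2&foo=3'

# Ordinary values are URL encoded.
assert _encode(search='index=main | head 1') == 'search=index%3Dmain+%7C+head+1'
```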
+def _encode(**kwargs): + items = [] + for key, value in six.iteritems(kwargs): + if isinstance(value, list): + items.extend([(key, item) for item in value]) + else: + items.append((key, value)) + return urllib.parse.urlencode(items) + +# Crack the given url into (scheme, host, port, path) +def _spliturl(url): + parsed_url = urllib.parse.urlparse(url) + host = parsed_url.hostname + port = parsed_url.port + path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path + # Strip brackets if its an IPv6 address + if host.startswith('[') and host.endswith(']'): host = host[1:-1] + if port is None: port = DEFAULT_PORT + return parsed_url.scheme, host, port, path + +# Given an HTTP request handler, this wrapper objects provides a related +# family of convenience methods built using that handler. +class HttpLib(object): + """A set of convenient methods for making HTTP calls. + + ``HttpLib`` provides a general :meth:`request` method, and :meth:`delete`, + :meth:`post`, and :meth:`get` methods for the three HTTP methods that Splunk + uses. + + By default, ``HttpLib`` uses Python's built-in ``httplib`` library, + but you can replace it by passing your own handling function to the + constructor for ``HttpLib``. + + The handling function should have the type: + + ``handler(`url`, `request_dict`) -> response_dict`` + + where `url` is the URL to make the request to (including any query and + fragment sections) as a dictionary with the following keys: + + - method: The method for the request, typically ``GET``, ``POST``, or ``DELETE``. + + - headers: A list of pairs specifying the HTTP headers (for example: ``[('key': value), ...]``). + + - body: A string containing the body to send with the request (this string + should default to ''). + + and ``response_dict`` is a dictionary with the following keys: + + - status: An integer containing the HTTP status code (such as 200 or 404). + + - reason: The reason phrase, if any, returned by the server. + + - headers: A list of pairs containing the response headers (for example, ``[('key': value), ...]``). + + - body: A stream-like object supporting ``read(size=None)`` and ``close()`` + methods to get the body of the response. + + The response dictionary is returned directly by ``HttpLib``'s methods with + no further processing. By default, ``HttpLib`` calls the :func:`handler` function + to get a handler function. + + If using the default handler, SSL verification can be disabled by passing verify=False. + """ + def __init__(self, custom_handler=None, verify=False, key_file=None, cert_file=None): + if custom_handler is None: + self.handler = handler(verify=verify, key_file=key_file, cert_file=cert_file) + else: + self.handler = custom_handler + self._cookies = {} + + def delete(self, url, headers=None, **kwargs): + """Sends a DELETE request to a URL. + + :param url: The URL. + :type url: ``string`` + :param headers: A list of pairs specifying the headers for the HTTP + response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). + :type headers: ``list`` + :param kwargs: Additional keyword arguments (optional). These arguments + are interpreted as the query part of the URL. The order of keyword + arguments is not preserved in the request, but the keywords and + their arguments will be URL encoded. + :type kwargs: ``dict`` + :returns: A dictionary describing the response (see :class:`HttpLib` for + its structure). 
+ :rtype: ``dict`` + """ + if headers is None: headers = [] + if kwargs: + # url is already a UrlEncoded. We have to manually declare + # the query to be encoded or it will get automatically URL + # encoded by being appended to url. + url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) + message = { + 'method': "DELETE", + 'headers': headers, + } + return self.request(url, message) + + def get(self, url, headers=None, **kwargs): + """Sends a GET request to a URL. + + :param url: The URL. + :type url: ``string`` + :param headers: A list of pairs specifying the headers for the HTTP + response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). + :type headers: ``list`` + :param kwargs: Additional keyword arguments (optional). These arguments + are interpreted as the query part of the URL. The order of keyword + arguments is not preserved in the request, but the keywords and + their arguments will be URL encoded. + :type kwargs: ``dict`` + :returns: A dictionary describing the response (see :class:`HttpLib` for + its structure). + :rtype: ``dict`` + """ + if headers is None: headers = [] + if kwargs: + # url is already a UrlEncoded. We have to manually declare + # the query to be encoded or it will get automatically URL + # encoded by being appended to url. + url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) + return self.request(url, { 'method': "GET", 'headers': headers }) + + def post(self, url, headers=None, **kwargs): + """Sends a POST request to a URL. + + :param url: The URL. + :type url: ``string`` + :param headers: A list of pairs specifying the headers for the HTTP + response (for example, ``[('Content-Type': 'text/cthulhu'), ('Token': 'boris')]``). + :type headers: ``list`` + :param kwargs: Additional keyword arguments (optional). If the argument + is ``body``, the value is used as the body for the request, and the + keywords and their arguments will be URL encoded. If there is no + ``body`` keyword argument, all the keyword arguments are encoded + into the body of the request in the format ``x-www-form-urlencoded``. + :type kwargs: ``dict`` + :returns: A dictionary describing the response (see :class:`HttpLib` for + its structure). + :rtype: ``dict`` + """ + if headers is None: headers = [] + + # We handle GET-style arguments and an unstructured body. This is here + # to support the receivers/stream endpoint. + if 'body' in kwargs: + # We only use application/x-www-form-urlencoded if there is no other + # Content-Type header present. This can happen in cases where we + # send requests as application/json, e.g. for KV Store. + if len([x for x in headers if x[0].lower() == "content-type"]) == 0: + headers.append(("Content-Type", "application/x-www-form-urlencoded")) + + body = kwargs.pop('body') + if isinstance(body, dict): + body = _encode(**body).encode('utf-8') + if len(kwargs) > 0: + url = url + UrlEncoded('?' + _encode(**kwargs), skip_encode=True) + else: + body = _encode(**kwargs).encode('utf-8') + message = { + 'method': "POST", + 'headers': headers, + 'body': body + } + return self.request(url, message) + + def request(self, url, message, **kwargs): + """Issues an HTTP request to a URL. + + :param url: The URL. + :type url: ``string`` + :param message: A dictionary with the format as described in + :class:`HttpLib`. + :type message: ``dict`` + :param kwargs: Additional keyword arguments (optional). These arguments + are passed unchanged to the handler. 
+ :type kwargs: ``dict`` + :returns: A dictionary describing the response (see :class:`HttpLib` for + its structure). + :rtype: ``dict`` + """ + response = self.handler(url, message, **kwargs) + response = record(response) + if 400 <= response.status: + raise HTTPError(response) + + # Update the cookie with any HTTP request + # Initially, assume list of 2-tuples + key_value_tuples = response.headers + # If response.headers is a dict, get the key-value pairs as 2-tuples + # this is the case when using urllib2 + if isinstance(response.headers, dict): + key_value_tuples = list(response.headers.items()) + for key, value in key_value_tuples: + if key.lower() == "set-cookie": + _parse_cookies(value, self._cookies) + + return response + + +# Converts an httplib response into a file-like object. +class ResponseReader(io.RawIOBase): + """This class provides a file-like interface for :class:`httplib` responses. + + The ``ResponseReader`` class is intended to be a layer to unify the different + types of HTTP libraries used with this SDK. This class also provides a + preview of the stream and a few useful predicates. + """ + # For testing, you can use a StringIO as the argument to + # ``ResponseReader`` instead of an ``httplib.HTTPResponse``. It + # will work equally well. + def __init__(self, response, connection=None): + self._response = response + self._connection = connection + self._buffer = b'' + + def __str__(self): + return self.read() + + @property + def empty(self): + """Indicates whether there is any more data in the response.""" + return self.peek(1) == b"" + + def peek(self, size): + """Nondestructively retrieves a given number of characters. + + The next :meth:`read` operation behaves as though this method was never + called. + + :param size: The number of characters to retrieve. + :type size: ``integer`` + """ + c = self.read(size) + self._buffer = self._buffer + c + return c + + def close(self): + """Closes this response.""" + if self._connection: + self._connection.close() + self._response.close() + + def read(self, size = None): + """Reads a given number of characters from the response. + + :param size: The number of characters to read, or "None" to read the + entire response. + :type size: ``integer`` or "None" + + """ + r = self._buffer + self._buffer = b'' + if size is not None: + size -= len(r) + r = r + self._response.read(size) + return r + + def readable(self): + """ Indicates that the response reader is readable.""" + return True + + def readinto(self, byte_array): + """ Read data into a byte array, upto the size of the byte array. + + :param byte_array: A byte array/memory view to pour bytes into. + :type byte_array: ``bytearray`` or ``memoryview`` + + """ + max_size = len(byte_array) + data = self.read(max_size) + bytes_read = len(data) + byte_array[:bytes_read] = data + return bytes_read + + +def handler(key_file=None, cert_file=None, timeout=None, verify=False): + """This class returns an instance of the default HTTP request handler using + the values you provide. + + :param `key_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing your private key (optional). + :type key_file: ``string`` + :param `cert_file`: A path to a PEM (Privacy Enhanced Mail) formatted file containing a certificate chain file (optional). + :type cert_file: ``string`` + :param `timeout`: The request time-out period, in seconds (optional). + :type timeout: ``integer`` or "None" + :param `verify`: Set to False to disable SSL verification on https connections. 
+ :type verify: ``Boolean`` + """ + + def connect(scheme, host, port): + kwargs = {} + if timeout is not None: kwargs['timeout'] = timeout + if scheme == "http": + return six.moves.http_client.HTTPConnection(host, port, **kwargs) + if scheme == "https": + if key_file is not None: kwargs['key_file'] = key_file + if cert_file is not None: kwargs['cert_file'] = cert_file + + if not verify: + kwargs['context'] = ssl._create_unverified_context() + return six.moves.http_client.HTTPSConnection(host, port, **kwargs) + raise ValueError("unsupported scheme: %s" % scheme) + + def request(url, message, **kwargs): + scheme, host, port, path = _spliturl(url) + body = message.get("body", "") + head = { + "Content-Length": str(len(body)), + "Host": host, + "User-Agent": "splunk-sdk-python/1.6.16", + "Accept": "*/*", + "Connection": "Close", + } # defaults + for key, value in message["headers"]: + head[key] = value + method = message.get("method", "GET") + + connection = connect(scheme, host, port) + is_keepalive = False + try: + connection.request(method, path, body, head) + if timeout is not None: + connection.sock.settimeout(timeout) + response = connection.getresponse() + is_keepalive = "keep-alive" in response.getheader("connection", default="close").lower() + finally: + if not is_keepalive: + connection.close() + + return { + "status": response.status, + "reason": response.reason, + "headers": response.getheaders(), + "body": ResponseReader(response, connection if is_keepalive else None), + } + + return request diff --git a/bin/splunklib/client.py b/bin/splunklib/client.py new file mode 100644 index 0000000..39b1dcc --- /dev/null +++ b/bin/splunklib/client.py @@ -0,0 +1,3737 @@ +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# The purpose of this module is to provide a friendlier domain interface to +# various Splunk endpoints. The approach here is to leverage the binding +# layer to capture endpoint context and provide objects and methods that +# offer simplified access their corresponding endpoints. The design avoids +# caching resource state. From the perspective of this module, the 'policy' +# for caching resource state belongs in the application or a higher level +# framework, and its the purpose of this module to provide simplified +# access to that resource state. +# +# A side note, the objects below that provide helper methods for updating eg: +# Entity state, are written so that they may be used in a fluent style. +# + +"""The **splunklib.client** module provides a Pythonic interface to the +`Splunk REST API `_, +allowing you programmatically access Splunk's resources. + +**splunklib.client** wraps a Pythonic layer around the wire-level +binding of the **splunklib.binding** module. The core of the library is the +:class:`Service` class, which encapsulates a connection to the server, and +provides access to the various aspects of Splunk's functionality, which are +exposed via the REST API. 
Typically you connect to a running Splunk instance +with the :func:`connect` function:: + + import splunklib.client as client + service = client.connect(host='localhost', port=8089, + username='admin', password='...') + assert isinstance(service, client.Service) + +:class:`Service` objects have fields for the various Splunk resources (such as apps, +jobs, saved searches, inputs, and indexes). All of these fields are +:class:`Collection` objects:: + + appcollection = service.apps + my_app = appcollection.create('my_app') + my_app = appcollection['my_app'] + appcollection.delete('my_app') + +The individual elements of the collection, in this case *applications*, +are subclasses of :class:`Entity`. An ``Entity`` object has fields for its +attributes, and methods that are specific to each kind of entity. For example:: + + print my_app['author'] # Or: print my_app.author + my_app.package() # Creates a compressed package of this application +""" + +import contextlib +import datetime +import json +import logging +import socket +from datetime import datetime, timedelta +from time import sleep + +from splunklib import six +from splunklib.six.moves import urllib + +from . import data +from .binding import (AuthenticationError, Context, HTTPError, UrlEncoded, + _encode, _make_cookie_header, _NoAuthenticationToken, + namespace) +from .data import record + +__all__ = [ + "connect", + "NotSupportedError", + "OperationError", + "IncomparableException", + "Service", + "namespace" +] + +PATH_APPS = "apps/local/" +PATH_CAPABILITIES = "authorization/capabilities/" +PATH_CONF = "configs/conf-%s/" +PATH_PROPERTIES = "properties/" +PATH_DEPLOYMENT_CLIENTS = "deployment/client/" +PATH_DEPLOYMENT_TENANTS = "deployment/tenants/" +PATH_DEPLOYMENT_SERVERS = "deployment/server/" +PATH_DEPLOYMENT_SERVERCLASSES = "deployment/serverclass/" +PATH_EVENT_TYPES = "saved/eventtypes/" +PATH_FIRED_ALERTS = "alerts/fired_alerts/" +PATH_INDEXES = "data/indexes/" +PATH_INPUTS = "data/inputs/" +PATH_JOBS = "search/jobs/" +PATH_LOGGER = "/services/server/logger/" +PATH_MESSAGES = "messages/" +PATH_MODULAR_INPUTS = "data/modular-inputs" +PATH_ROLES = "authorization/roles/" +PATH_SAVED_SEARCHES = "saved/searches/" +PATH_STANZA = "configs/conf-%s/%s" # (file, stanza) +PATH_USERS = "authentication/users/" +PATH_RECEIVERS_STREAM = "/services/receivers/stream" +PATH_RECEIVERS_SIMPLE = "/services/receivers/simple" +PATH_STORAGE_PASSWORDS = "storage/passwords" + +XNAMEF_ATOM = "{http://www.w3.org/2005/Atom}%s" +XNAME_ENTRY = XNAMEF_ATOM % "entry" +XNAME_CONTENT = XNAMEF_ATOM % "content" + +MATCH_ENTRY_CONTENT = "%s/%s/*" % (XNAME_ENTRY, XNAME_CONTENT) + + +class IllegalOperationException(Exception): + """Thrown when an operation is not possible on the Splunk instance that a + :class:`Service` object is connected to.""" + pass + + +class IncomparableException(Exception): + """Thrown when trying to compare objects (using ``==``, ``<``, ``>``, and + so on) of a type that doesn't support it.""" + pass + + +class AmbiguousReferenceException(ValueError): + """Thrown when the name used to fetch an entity matches more than one entity.""" + pass + + +class InvalidNameException(Exception): + """Thrown when the specified name contains characters that are not allowed + in Splunk entity names.""" + pass + + +class NoSuchCapability(Exception): + """Thrown when the capability that has been referred to doesn't exist.""" + pass + + +class OperationError(Exception): + """Raised for a failed operation, such as a time out.""" + pass + + +class 
NotSupportedError(Exception): + """Raised for operations that are not supported on a given object.""" + pass + + +def _trailing(template, *targets): + """Substring of *template* following all *targets*. + + **Example**:: + + template = "this is a test of the bunnies." + _trailing(template, "is", "est", "the") == " bunnies" + + Each target is matched successively in the string, and the string + remaining after the last target is returned. If one of the targets + fails to match, a ValueError is raised. + + :param template: Template to extract a trailing string from. + :type template: ``string`` + :param targets: Strings to successively match in *template*. + :type targets: list of ``string``s + :return: Trailing string after all targets are matched. + :rtype: ``string`` + :raises ValueError: Raised when one of the targets does not match. + """ + s = template + for t in targets: + n = s.find(t) + if n == -1: + raise ValueError("Target " + t + " not found in template.") + s = s[n + len(t):] + return s + + +# Filter the given state content record according to the given arg list. +def _filter_content(content, *args): + if len(args) > 0: + return record((k, content[k]) for k in args) + return record((k, v) for k, v in six.iteritems(content) + if k not in ['eai:acl', 'eai:attributes', 'type']) + +# Construct a resource path from the given base path + resource name +def _path(base, name): + if not base.endswith('/'): base = base + '/' + return base + name + + +# Load an atom record from the body of the given response +# this will ultimately be sent to an xml ElementTree so we +# should use the xmlcharrefreplace option +def _load_atom(response, match=None): + return data.load(response.body.read() + .decode('utf-8', 'xmlcharrefreplace'), match) + + +# Load an array of atom entries from the body of the given response +def _load_atom_entries(response): + r = _load_atom(response) + if 'feed' in r: + # Need this to handle a random case in the REST API + if r.feed.get('totalResults') in [0, '0']: + return [] + entries = r.feed.get('entry', None) + if entries is None: return None + return entries if isinstance(entries, list) else [entries] + # Unlike most other endpoints, the jobs endpoint does not return + # its state wrapped in another element, but at the top level. + # For example, in XML, it returns ... instead of + # .... 
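+    # Editor's illustration (assumed markup, added for clarity): most
+    # endpoints wrap their entries as
+    #     <feed><entry>...</entry></feed>
+    # while the jobs endpoint returns a bare
+    #     <entry>...</entry>
+    # at the top level, which is why we fall back to r.get('entry') below.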
+ else: + entries = r.get('entry', None) + if entries is None: return None + return entries if isinstance(entries, list) else [entries] + + +# Load the sid from the body of the given response +def _load_sid(response): + return _load_atom(response).response.sid + + +# Parse the given atom entry record into a generic entity state record +def _parse_atom_entry(entry): + title = entry.get('title', None) + + elink = entry.get('link', []) + elink = elink if isinstance(elink, list) else [elink] + links = record((link.rel, link.href) for link in elink) + + # Retrieve entity content values + content = entry.get('content', {}) + + # Host entry metadata + metadata = _parse_atom_metadata(content) + + # Filter some of the noise out of the content record + content = record((k, v) for k, v in six.iteritems(content) + if k not in ['eai:acl', 'eai:attributes']) + + if 'type' in content: + if isinstance(content['type'], list): + content['type'] = [t for t in content['type'] if t != 'text/xml'] + # Unset type if it was only 'text/xml' + if len(content['type']) == 0: + content.pop('type', None) + # Flatten 1 element list + if len(content['type']) == 1: + content['type'] = content['type'][0] + else: + content.pop('type', None) + + return record({ + 'title': title, + 'links': links, + 'access': metadata.access, + 'fields': metadata.fields, + 'content': content, + 'updated': entry.get("updated") + }) + + +# Parse the metadata fields out of the given atom entry content record +def _parse_atom_metadata(content): + # Hoist access metadata + access = content.get('eai:acl', None) + + # Hoist content metadata (and cleanup some naming) + attributes = content.get('eai:attributes', {}) + fields = record({ + 'required': attributes.get('requiredFields', []), + 'optional': attributes.get('optionalFields', []), + 'wildcard': attributes.get('wildcardFields', [])}) + + return record({'access': access, 'fields': fields}) + +# kwargs: scheme, host, port, app, owner, username, password +def connect(**kwargs): + """This function connects and logs in to a Splunk instance. + + This function is a shorthand for :meth:`Service.login`. + The ``connect`` function makes one round trip to the server (for logging in). + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). + :type scheme: "https" or "http" + :param verify: Enable (True) or disable (False) SSL verrification for + https connections. (optional, the default is True) + :type verify: ``Boolean`` + :param `owner`: The owner context of the namespace (optional). + :type owner: ``string`` + :param `app`: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (the default is "user"). + :type sharing: "global", "system", "app", or "user" + :param `token`: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param autologin: When ``True``, automatically tries to log in again if the + session terminates. + :type autologin: ``boolean`` + :param `username`: The Splunk account username, which is used to + authenticate the Splunk instance. 
+ :type username: ``string`` + :param `password`: The password for the Splunk account. + :type password: ``string`` + :return: An initialized :class:`Service` connection. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + a = s.apps["my_app"] + ... + """ + s = Service(**kwargs) + s.login() + return s + + +# In preparation for adding Storm support, we added an +# intermediary class between Service and Context. Storm's +# API is not going to be the same as enterprise Splunk's +# API, so we will derive both Service (for enterprise Splunk) +# and StormService for (Splunk Storm) from _BaseService, and +# put any shared behavior on it. +class _BaseService(Context): + pass + + +class Service(_BaseService): + """A Pythonic binding to Splunk instances. + + A :class:`Service` represents a binding to a Splunk instance on an + HTTP or HTTPS port. It handles the details of authentication, wire + formats, and wraps the REST API endpoints into something more + Pythonic. All of the low-level operations on the instance from + :class:`splunklib.binding.Context` are also available in case you need + to do something beyond what is provided by this class. + + After creating a ``Service`` object, you must call its :meth:`login` + method before you can issue requests to Splunk. + Alternately, use the :func:`connect` function to create an already + authenticated :class:`Service` object, or provide a session token + when creating the :class:`Service` object explicitly (the same + token may be shared by multiple :class:`Service` objects). + + :param host: The host name (the default is "localhost"). + :type host: ``string`` + :param port: The port number (the default is 8089). + :type port: ``integer`` + :param scheme: The scheme for accessing the service (the default is "https"). + :type scheme: "https" or "http" + :param verify: Enable (True) or disable (False) SSL verrification for + https connections. (optional, the default is True) + :type verify: ``Boolean`` + :param `owner`: The owner context of the namespace (optional; use "-" for wildcard). + :type owner: ``string`` + :param `app`: The app context of the namespace (optional; use "-" for wildcard). + :type app: ``string`` + :param `token`: The current session token (optional). Session tokens can be + shared across multiple service instances. + :type token: ``string`` + :param cookie: A session cookie. When provided, you don't need to call :meth:`login`. + This parameter is only supported for Splunk 6.2+. + :type cookie: ``string`` + :param `username`: The Splunk account username, which is used to + authenticate the Splunk instance. + :type username: ``string`` + :param `password`: The password, which is used to authenticate the Splunk + instance. + :type password: ``string`` + :return: A :class:`Service` instance. + + **Example**:: + + import splunklib.client as client + s = client.Service(username="boris", password="natasha", ...) + s.login() + # Or equivalently + s = client.connect(username="boris", password="natasha") + # Or if you already have a session token + s = client.Service(token="atg232342aa34324a") + # Or if you already have a valid cookie + s = client.Service(cookie="splunkd_8089=...") + """ + def __init__(self, **kwargs): + super(Service, self).__init__(**kwargs) + self._splunk_version = None + + @property + def apps(self): + """Returns the collection of applications that are installed on this instance of Splunk. + + :return: A :class:`Collection` of :class:`Application` entities. 
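+
+        **Example** (editor's sketch, assuming a reachable Splunk instance)::
+
+            import splunklib.client as client
+            service = client.connect(host='localhost', port=8089,
+                                     username='admin', password='...')
+            for app in service.apps:
+                print(app.name)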
+ """ + return Collection(self, PATH_APPS, item=Application) + + @property + def confs(self): + """Returns the collection of configuration files for this Splunk instance. + + :return: A :class:`Configurations` collection of + :class:`ConfigurationFile` entities. + """ + return Configurations(self) + + @property + def capabilities(self): + """Returns the list of system capabilities. + + :return: A ``list`` of capabilities. + """ + response = self.get(PATH_CAPABILITIES) + return _load_atom(response, MATCH_ENTRY_CONTENT).capabilities + + @property + def event_types(self): + """Returns the collection of event types defined in this Splunk instance. + + :return: An :class:`Entity` containing the event types. + """ + return Collection(self, PATH_EVENT_TYPES) + + @property + def fired_alerts(self): + """Returns the collection of alerts that have been fired on the Splunk + instance, grouped by saved search. + + :return: A :class:`Collection` of :class:`AlertGroup` entities. + """ + return Collection(self, PATH_FIRED_ALERTS, item=AlertGroup) + + @property + def indexes(self): + """Returns the collection of indexes for this Splunk instance. + + :return: An :class:`Indexes` collection of :class:`Index` entities. + """ + return Indexes(self, PATH_INDEXES, item=Index) + + @property + def info(self): + """Returns the information about this instance of Splunk. + + :return: The system information, as key-value pairs. + :rtype: ``dict`` + """ + response = self.get("/services/server/info") + return _filter_content(_load_atom(response, MATCH_ENTRY_CONTENT)) + + def input(self, path, kind=None): + """Retrieves an input by path, and optionally kind. + + :return: A :class:`Input` object. + """ + return Input(self, path, kind=kind).refresh() + + @property + def inputs(self): + """Returns the collection of inputs configured on this Splunk instance. + + :return: An :class:`Inputs` collection of :class:`Input` entities. + """ + return Inputs(self) + + def job(self, sid): + """Retrieves a search job by sid. + + :return: A :class:`Job` object. + """ + return Job(self, sid).refresh() + + @property + def jobs(self): + """Returns the collection of current search jobs. + + :return: A :class:`Jobs` collection of :class:`Job` entities. + """ + return Jobs(self) + + @property + def loggers(self): + """Returns the collection of logging level categories and their status. + + :return: A :class:`Loggers` collection of logging levels. + """ + return Loggers(self) + + @property + def messages(self): + """Returns the collection of service messages. + + :return: A :class:`Collection` of :class:`Message` entities. + """ + return Collection(self, PATH_MESSAGES, item=Message) + + @property + def modular_input_kinds(self): + """Returns the collection of the modular input kinds on this Splunk instance. + + :return: A :class:`ReadOnlyCollection` of :class:`ModularInputKind` entities. + """ + if self.splunk_version >= (5,): + return ReadOnlyCollection(self, PATH_MODULAR_INPUTS, item=ModularInputKind) + else: + raise IllegalOperationException("Modular inputs are not supported before Splunk version 5.") + + @property + def storage_passwords(self): + """Returns the collection of the storage passwords on this Splunk instance. + + :return: A :class:`ReadOnlyCollection` of :class:`StoragePasswords` entities. + """ + return StoragePasswords(self) + + # kwargs: enable_lookups, reload_macros, parse_only, output_mode + def parse(self, query, **kwargs): + """Parses a search query and returns a semantic map of the search. 
+ + :param query: The search query to parse. + :type query: ``string`` + :param kwargs: Arguments to pass to the ``search/parser`` endpoint + (optional). Valid arguments are: + + * "enable_lookups" (``boolean``): If ``True``, performs reverse lookups + to expand the search expression. + + * "output_mode" (``string``): The output format (XML or JSON). + + * "parse_only" (``boolean``): If ``True``, disables the expansion of + search due to evaluation of subsearches, time term expansion, + lookups, tags, eventtypes, and sourcetype alias. + + * "reload_macros" (``boolean``): If ``True``, reloads macro + definitions from macros.conf. + + :type kwargs: ``dict`` + :return: A semantic map of the parsed search query. + """ + return self.get("search/parser", q=query, **kwargs) + + def restart(self, timeout=None): + """Restarts this Splunk instance. + + The service is unavailable until it has successfully restarted. + + If a *timeout* value is specified, ``restart`` blocks until the service + resumes or the timeout period has been exceeded. Otherwise, ``restart`` returns + immediately. + + :param timeout: A timeout period, in seconds. + :type timeout: ``integer`` + """ + msg = { "value": "Restart requested by " + self.username + " via the Splunk SDK for Python"} + # This message will be deleted once the server actually restarts. + self.messages.create(name="restart_required", **msg) + result = self.post("/services/server/control/restart") + if timeout is None: + return result + start = datetime.now() + diff = timedelta(seconds=timeout) + while datetime.now() - start < diff: + try: + self.login() + if not self.restart_required: + return result + except Exception as e: + sleep(1) + raise Exception("Operation timed out.") + + @property + def restart_required(self): + """Indicates whether splunkd is in a state that requires a restart. + + :return: A ``boolean`` that indicates whether a restart is required. + + """ + response = self.get("messages").body.read() + messages = data.load(response)['feed'] + if 'entry' not in messages: + result = False + else: + if isinstance(messages['entry'], dict): + titles = [messages['entry']['title']] + else: + titles = [x['title'] for x in messages['entry']] + result = 'restart_required' in titles + return result + + @property + def roles(self): + """Returns the collection of user roles. + + :return: A :class:`Roles` collection of :class:`Role` entities. + """ + return Roles(self) + + def search(self, query, **kwargs): + """Runs a search using a search query and any optional arguments you + provide, and returns a `Job` object representing the search. + + :param query: A search query. + :type query: ``string`` + :param kwargs: Arguments for the search (optional): + + * "output_mode" (``string``): Specifies the output format of the + results. + + * "earliest_time" (``string``): Specifies the earliest time in the + time range to + search. The time string can be a UTC time (with fractional + seconds), a relative time specifier (to now), or a formatted + time string. + + * "latest_time" (``string``): Specifies the latest time in the time + range to + search. The time string can be a UTC time (with fractional + seconds), a relative time specifier (to now), or a formatted + time string. + + * "rf" (``string``): Specifies one or more fields to add to the + search. + + :type kwargs: ``dict`` + :rtype: :class:`Job` + :returns: An object representing the created job.
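+
+        **Example** (editor's sketch; ``exec_mode="blocking"`` and the results
+        reader are ordinary splunklib usage, but treat this as illustrative
+        only)::
+
+            import splunklib.client as client
+            import splunklib.results as results
+            service = client.connect(host='localhost', port=8089,
+                                     username='admin', password='...')
+            job = service.search("search index=_internal | head 5",
+                                 exec_mode="blocking")
+            for result in results.ResultsReader(job.results()):
+                print(result)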
+ """ + return self.jobs.create(query, **kwargs) + + @property + def saved_searches(self): + """Returns the collection of saved searches. + + :return: A :class:`SavedSearches` collection of :class:`SavedSearch` + entities. + """ + return SavedSearches(self) + + @property + def settings(self): + """Returns the configuration settings for this instance of Splunk. + + :return: A :class:`Settings` object containing configuration settings. + """ + return Settings(self) + + @property + def splunk_version(self): + """Returns the version of the splunkd instance this object is attached + to. + + The version is returned as a tuple of the version components as + integers (for example, `(4,3,3)` or `(5,)`). + + :return: A ``tuple`` of ``integers``. + """ + if self._splunk_version is None: + self._splunk_version = tuple([int(p) for p in self.info['version'].split('.')]) + return self._splunk_version + + @property + def kvstore(self): + """Returns the collection of KV Store collections. + + :return: A :class:`KVStoreCollections` collection of :class:`KVStoreCollection` entities. + """ + return KVStoreCollections(self) + + @property + def users(self): + """Returns the collection of users. + + :return: A :class:`Users` collection of :class:`User` entities. + """ + return Users(self) + + +class Endpoint(object): + """This class represents individual Splunk resources in the Splunk REST API. + + An ``Endpoint`` object represents a URI, such as ``/services/saved/searches``. + This class provides the common functionality of :class:`Collection` and + :class:`Entity` (essentially HTTP GET and POST methods). + """ + def __init__(self, service, path): + self.service = service + self.path = path if path.endswith('/') else path + '/' + + def get(self, path_segment="", owner=None, app=None, sharing=None, **query): + """Performs a GET operation on the path segment relative to this endpoint. + + This method is named to match the HTTP method. This method makes at least + one roundtrip to the server, one additional round trip for + each 303 status returned, plus at most two additional round + trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method takes a + default namespace from the :class:`Service` object for this :class:`Endpoint`. + All other keyword arguments are included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Service`` is not logged in. + :raises HTTPError: Raised when an error in the request occurs. + :param path_segment: A path segment relative to this endpoint. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (optional). + :type sharing: "global", "system", "app", or "user" + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + import splunklib.client + s = client.service(...) 
+ apps = s.apps + apps.get() == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '26208'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:30:35 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + apps.get('nonexistant/path') # raises HTTPError + s.logout() + apps.get() # raises AuthenticationError + """ + # self.path to the Endpoint is relative in the SDK, so passing + # owner, app, sharing, etc. along will produce the correct + # namespace in the final request. + if path_segment.startswith('/'): + path = path_segment + else: + path = self.service._abspath(self.path + path_segment, owner=owner, + app=app, sharing=sharing) + # ^-- This was "%s%s" % (self.path, path_segment). + # That doesn't work, because self.path may be UrlEncoded. + return self.service.get(path, + owner=owner, app=app, sharing=sharing, + **query) + + def post(self, path_segment="", owner=None, app=None, sharing=None, **query): + """Performs a POST operation on the path segment relative to this endpoint. + + This method is named to match the HTTP method. This method makes at least + one roundtrip to the server, one additional round trip for + each 303 status returned, plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + If *owner*, *app*, and *sharing* are omitted, this method takes a + default namespace from the :class:`Service` object for this :class:`Endpoint`. + All other keyword arguments are included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Service`` is not logged in. + :raises HTTPError: Raised when an error in the request occurs. + :param path_segment: A path segment relative to this endpoint. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode of the namespace (optional). + :type sharing: ``string`` + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + import splunklib.client + s = client.service(...) + apps = s.apps + apps.post(name='boris') == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '2908'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 18:34:50 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'Created', + 'status': 201} + apps.get('nonexistant/path') # raises HTTPError + s.logout() + apps.get() # raises AuthenticationError + """ + if path_segment.startswith('/'): + path = path_segment + else: + path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) + return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) + + +# kwargs: path, app, owner, sharing, state +class Entity(Endpoint): + """This class is a base class for Splunk entities in the REST API, such as + saved searches, jobs, indexes, and inputs. 
+ + ``Entity`` provides the majority of functionality required by entities. + Subclasses only implement the special cases for individual entities. + For example for deployment serverclasses, the subclass makes whitelists and + blacklists into Python lists. + + An ``Entity`` is addressed like a dictionary, with a few extensions, + so the following all work:: + + ent['email.action'] + ent['disabled'] + ent['whitelist'] + + Many endpoints have values that share a prefix, such as + ``email.to``, ``email.action``, and ``email.subject``. You can extract + the whole fields, or use the key ``email`` to get a dictionary of + all the subelements. That is, ``ent['email']`` returns a + dictionary with the keys ``to``, ``action``, ``subject``, and so on. If + there are multiple levels of dots, each level is made into a + subdictionary, so ``email.body.salutation`` can be accessed at + ``ent['email']['body']['salutation']`` or + ``ent['email.body.salutation']``. + + You can also access the fields as though they were the fields of a Python + object, as in:: + + ent.email.action + ent.disabled + ent.whitelist + + However, because some of the field names are not valid Python identifiers, + the dictionary-like syntax is preferrable. + + The state of an :class:`Entity` object is cached, so accessing a field + does not contact the server. If you think the values on the + server have changed, call the :meth:`Entity.refresh` method. + """ + # Not every endpoint in the API is an Entity or a Collection. For + # example, a saved search at saved/searches/{name} has an additional + # method saved/searches/{name}/scheduled_times, but this isn't an + # entity in its own right. In these cases, subclasses should + # implement a method that uses the get and post methods inherited + # from Endpoint, calls the _load_atom function (it's elsewhere in + # client.py, but not a method of any object) to read the + # information, and returns the extracted data in a Pythonesque form. + # + # The primary use of subclasses of Entity is to handle specially + # named fields in the Entity. If you only need to provide a default + # value for an optional field, subclass Entity and define a + # dictionary ``defaults``. For instance,:: + # + # class Hypothetical(Entity): + # defaults = {'anOptionalField': 'foo', + # 'anotherField': 'bar'} + # + # If you have to do more than provide a default, such as rename or + # actually process values, then define a new method with the + # ``@property`` decorator. + # + # class Hypothetical(Entity): + # @property + # def foobar(self): + # return self.content['foo'] + "-" + self.content["bar"] + + # Subclasses can override defaults the default values for + # optional fields. See above. + defaults = {} + + def __init__(self, service, path, **kwargs): + Endpoint.__init__(self, service, path) + self._state = None + if not kwargs.get('skip_refresh', False): + self.refresh(kwargs.get('state', None)) # "Prefresh" + return + + def __contains__(self, item): + try: + self[item] + return True + except (KeyError, AttributeError): + return False + + def __eq__(self, other): + """Raises IncomparableException. + + Since Entity objects are snapshots of times on the server, no + simple definition of equality will suffice beyond instance + equality, and instance equality leads to strange situations + such as:: + + import splunklib.client as client + c = client.connect(...) + saved_searches = c.saved_searches + x = saved_searches['asearch'] + + but then ``x != saved_searches['asearch']``. 
+ + Defining equality by comparing state would be just as unclear, since it is ambiguous whether ``x == saved_searches['asearch']`` should hold whether or not there was a change on the server. Rather than + try to do something fancy, we simply declare that equality is + undefined for Entities. + + Makes no roundtrips to the server. + """ + raise IncomparableException( + "Equality is undefined for objects of class %s" % \ + self.__class__.__name__) + + def __getattr__(self, key): + # Called when an attribute was not found by the normal method. In this + # case we try to find it in self.content and then self.defaults. + if key in self.state.content: + return self.state.content[key] + elif key in self.defaults: + return self.defaults[key] + else: + raise AttributeError(key) + + def __getitem__(self, key): + # getattr attempts to find a field on the object in the normal way, + # then calls __getattr__ if it cannot. + return getattr(self, key) + + # Load the Atom entry record from the given response - this is a method + # because the "entry" record varies slightly by entity and this allows + # for a subclass to override and handle any special cases. + def _load_atom_entry(self, response): + elem = _load_atom(response, XNAME_ENTRY) + if isinstance(elem, list): + raise AmbiguousReferenceException("Fetch from server returned multiple entries for name %s." % self.name) + else: + return elem.entry + + # Load the entity state record from the given response + def _load_state(self, response): + entry = self._load_atom_entry(response) + return _parse_atom_entry(entry) + + def _run_action(self, path_segment, **kwargs): + """Run a method and return the content Record from the returned XML. + + A method is a relative path from an Entity that is not itself + an Entity. _run_action assumes that the returned XML is an + Atom feed containing one Entry, and the contents of Entry is + what should be the return value. This is right in enough cases + to make this method useful. + """ + response = self.get(path_segment, **kwargs) + data = self._load_atom_entry(response) + rec = _parse_atom_entry(data) + return rec.content + + def _proper_namespace(self, owner=None, app=None, sharing=None): + """Produce a namespace sans wildcards for use in entity requests. + + This method tries to fill in the fields of the namespace which are `None` + or wildcard (`'-'`) from the entity's namespace. If that fails, it uses + the service's namespace. + + :param owner: + :param app: + :param sharing: + :return: + """ + if owner is None and app is None and sharing is None: # No namespace provided + if self._state is not None and 'access' in self._state: + return (self._state.access.owner, + self._state.access.app, + self._state.access.sharing) + else: + return (self.service.namespace['owner'], + self.service.namespace['app'], + self.service.namespace['sharing']) + else: + return (owner, app, sharing) + + def delete(self): + owner, app, sharing = self._proper_namespace() + return self.service.delete(self.path, owner=owner, app=app, sharing=sharing) + + def get(self, path_segment="", owner=None, app=None, sharing=None, **query): + owner, app, sharing = self._proper_namespace(owner, app, sharing) + return super(Entity, self).get(path_segment, owner=owner, app=app, sharing=sharing, **query) + + def post(self, path_segment="", owner=None, app=None, sharing=None, **query): + owner, app, sharing = self._proper_namespace(owner, app, sharing) + return super(Entity, self).post(path_segment, owner=owner, app=app, sharing=sharing, **query) + + def refresh(self, state=None): + """Refreshes the state of this entity. + + If *state* is provided, load it as the new state for this + entity.
Otherwise, make a roundtrip to the server (by calling + the :meth:`read` method of ``self``) to fetch an updated state, + plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param state: Entity-specific arguments (optional). + :type state: ``dict`` + :raises EntityDeletedException: Raised if the entity no longer exists on + the server. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + search = s.apps['search'] + search.refresh() + """ + if state is not None: + self._state = state + else: + self._state = self.read(self.get()) + return self + + @property + def access(self): + """Returns the access metadata for this entity. + + :return: A :class:`splunklib.data.Record` object with three keys: + ``owner``, ``app``, and ``sharing``. + """ + return self.state.access + + @property + def content(self): + """Returns the contents of the entity. + + :return: A ``dict`` containing values. + """ + return self.state.content + + def disable(self): + """Disables the entity at this endpoint.""" + self.post("disable") + if self.service.restart_required: + self.service.restart(120) + return self + + def enable(self): + """Enables the entity at this endpoint.""" + self.post("enable") + return self + + @property + def fields(self): + """Returns the content metadata for this entity. + + :return: A :class:`splunklib.data.Record` object with three keys: + ``required``, ``optional``, and ``wildcard``. + """ + return self.state.fields + + @property + def links(self): + """Returns a dictionary of related resources. + + :return: A ``dict`` with keys and corresponding URLs. + """ + return self.state.links + + @property + def name(self): + """Returns the entity name. + + :return: The entity name. + :rtype: ``string`` + """ + return self.state.title + + def read(self, response): + """ Reads the current state of the entity from the server. """ + results = self._load_state(response) + # In lower layers of the SDK, we end up trying to URL encode + # text to be dispatched via HTTP. However, these links are already + # URL encoded when they arrive, and we need to mark them as such. + unquoted_links = dict([(k, UrlEncoded(v, skip_encode=True)) + for k,v in six.iteritems(results['links'])]) + results['links'] = unquoted_links + return results + + def reload(self): + """Reloads the entity.""" + self.post("_reload") + return self + + @property + def state(self): + """Returns the entity's state record. + + :return: A ``dict`` containing fields and metadata for the entity. + """ + if self._state is None: self.refresh() + return self._state + + def update(self, **kwargs): + """Updates the server with any changes you've made to the current entity + along with any additional arguments you specify. + + **Note**: You cannot update the ``name`` field of an entity. + + Many of the fields in the REST API are not valid Python + identifiers, which means you cannot pass them as keyword + arguments. That is, Python will fail to parse the following:: + + # This fails + x.update(check-new=False, email.to='boris@utopia.net') + + However, you can always explicitly use a dictionary to pass + such keys:: + + # This works + x.update(**{'check-new': False, 'email.to': 'boris@utopia.net'}) + + :param kwargs: Additional entity-specific arguments (optional). + :type kwargs: ``dict`` + + :return: The entity this method is called on. 
+ :rtype: class:`Entity` + """ + # The peculiarity in question: the REST API creates a new + # Entity if we pass name in the dictionary, instead of the + # expected behavior of updating this Entity. Therefore we + # check for 'name' in kwargs and throw an error if it is + # there. + if 'name' in kwargs: + raise IllegalOperationException('Cannot update the name of an Entity via the REST API.') + self.post(**kwargs) + return self + + +class ReadOnlyCollection(Endpoint): + """This class represents a read-only collection of entities in the Splunk + instance. + """ + def __init__(self, service, path, item=Entity): + Endpoint.__init__(self, service, path) + self.item = item # Item accessor + self.null_count = -1 + + def __contains__(self, name): + """Is there at least one entry called *name* in this collection? + + Makes a single roundtrip to the server, plus at most two more + if + the ``autologin`` field of :func:`connect` is set to ``True``. + """ + try: + self[name] + return True + except KeyError: + return False + except AmbiguousReferenceException: + return True + + def __getitem__(self, key): + """Fetch an item named *key* from this collection. + + A name is not a unique identifier in a collection. The unique + identifier is a name plus a namespace. For example, there can + be a saved search named ``'mysearch'`` with sharing ``'app'`` + in application ``'search'``, and another with sharing + ``'user'`` with owner ``'boris'`` and application + ``'search'``. If the ``Collection`` is attached to a + ``Service`` that has ``'-'`` (wildcard) as user and app in its + namespace, then both of these may be visible under the same + name. + + Where there is no conflict, ``__getitem__`` will fetch the + entity given just the name. If there is a conflict and you + pass just a name, it will raise a ``ValueError``. In that + case, add the namespace as a second argument. + + This function makes a single roundtrip to the server, plus at + most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param key: The name to fetch, or a tuple (name, namespace). + :return: An :class:`Entity` object. + :raises KeyError: Raised if *key* does not exist. + :raises ValueError: Raised if no namespace is specified and *key* + does not refer to a unique name. + + **Example**:: + + s = client.connect(...) + saved_searches = s.saved_searches + x1 = saved_searches.create( + 'mysearch', 'search * | head 1', + owner='admin', app='search', sharing='app') + x2 = saved_searches.create( + 'mysearch', 'search * | head 1', + owner='admin', app='search', sharing='user') + # Raises ValueError: + saved_searches['mysearch'] + # Fetches x1 + saved_searches[ + 'mysearch', + client.namespace(sharing='app', app='search')] + # Fetches x2 + saved_searches[ + 'mysearch', + client.namespace(sharing='user', owner='boris', app='search')] + """ + try: + if isinstance(key, tuple) and len(key) == 2: + # x[a,b] is translated to x.__getitem__( (a,b) ), so we + # have to extract values out. + key, ns = key + key = UrlEncoded(key, encode_slash=True) + response = self.get(key, owner=ns.owner, app=ns.app) + else: + key = UrlEncoded(key, encode_slash=True) + response = self.get(key) + entries = self._load_list(response) + if len(entries) > 1: + raise AmbiguousReferenceException("Found multiple entities named '%s'; please specify a namespace." 
% key) + elif len(entries) == 0: + raise KeyError(key) + else: + return entries[0] + except HTTPError as he: + if he.status == 404: # No entity matching key and namespace. + raise KeyError(key) + else: + raise + + def __iter__(self, **kwargs): + """Iterate over the entities in the collection. + + :param kwargs: Additional arguments. + :type kwargs: ``dict`` + :rtype: iterator over entities. + + Implemented to give Collection a listish interface. This + function always makes a roundtrip to the server, plus at most + two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + saved_searches = c.saved_searches + for entity in saved_searches: + print "Saved search named %s" % entity.name + """ + + for item in self.iter(**kwargs): + yield item + + def __len__(self): + """Enable ``len(...)`` for ``Collection`` objects. + + Implemented for consistency with a listish interface. No + further failure modes beyond those possible for any method on + an Endpoint. + + This function always makes a round trip to the server, plus at + most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + saved_searches = c.saved_searches + n = len(saved_searches) + """ + return len(self.list()) + + def _entity_path(self, state): + """Calculate the path to an entity to be returned. + + *state* should be the dictionary returned by + :func:`_parse_atom_entry`. :func:`_entity_path` extracts the + link to this entity from *state*, and strips all the namespace + prefixes from it to leave only the relative path of the entity + itself, sans namespace. + + :rtype: ``string`` + :return: an absolute path + """ + # This has been factored out so that it can be easily + # overloaded by Configurations, which has to switch its + # entities' endpoints from its own properties/ to configs/. + raw_path = urllib.parse.unquote(state.links.alternate) + if 'servicesNS/' in raw_path: + return _trailing(raw_path, 'servicesNS/', '/', '/') + elif 'services/' in raw_path: + return _trailing(raw_path, 'services/') + else: + return raw_path + + def _load_list(self, response): + """Converts *response* to a list of entities. + + *response* is assumed to be a :class:`Record` containing an + HTTP response, of the form:: + + {'status': 200, + 'headers': [('content-length', '232642'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Tue, 29 May 2012 15:27:08 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'body': ...a stream implementing .read()...} + + The ``'body'`` key refers to a stream containing an Atom feed, + that is, an XML document with a toplevel element ````, + and within that element one or more ```` elements. + """ + # Some subclasses of Collection have to override this because + # splunkd returns something that doesn't match + # . + entries = _load_atom_entries(response) + if entries is None: return [] + entities = [] + for entry in entries: + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + self._entity_path(state), + state=state) + entities.append(entity) + + return entities + + def itemmeta(self): + """Returns metadata for members of the collection. 
+ + Makes a single roundtrip to the server, plus two more at most if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :return: A :class:`splunklib.data.Record` object containing the metadata. + + **Example**:: + + import splunklib.client as client + import pprint + s = client.connect(...) + pprint.pprint(s.apps.itemmeta()) + {'access': {'app': 'search', + 'can_change_perms': '1', + 'can_list': '1', + 'can_share_app': '1', + 'can_share_global': '1', + 'can_share_user': '1', + 'can_write': '1', + 'modifiable': '1', + 'owner': 'admin', + 'perms': {'read': ['*'], 'write': ['admin']}, + 'removable': '0', + 'sharing': 'user'}, + 'fields': {'optional': ['author', + 'configured', + 'description', + 'label', + 'manageable', + 'template', + 'visible'], + 'required': ['name'], 'wildcard': []}} + """ + response = self.get("_new") + content = _load_atom(response, MATCH_ENTRY_CONTENT) + return _parse_atom_metadata(content) + + def iter(self, offset=0, count=None, pagesize=None, **kwargs): + """Iterates over the collection. + + This method is equivalent to the :meth:`list` method, but + it returns an iterator and can load a certain number of entities at a + time from the server. + + :param offset: The index of the first entity to return (optional). + :type offset: ``integer`` + :param count: The maximum number of entities to return (optional). + :type count: ``integer`` + :param pagesize: The number of entities to load (optional). + :type pagesize: ``integer`` + :param kwargs: Additional arguments (optional): + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + for saved_search in s.saved_searches.iter(pagesize=10): + # Loads 10 saved searches at a time from the + # server. + ... + """ + assert pagesize is None or pagesize > 0 + if count is None: + count = self.null_count + fetched = 0 + while count == self.null_count or fetched < count: + response = self.get(count=pagesize or count, offset=offset, **kwargs) + items = self._load_list(response) + N = len(items) + fetched += N + for item in items: + yield item + if pagesize is None or N < pagesize: + break + offset += N + logging.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) + + # kwargs: count, offset, search, sort_dir, sort_key, sort_mode + def list(self, count=None, **kwargs): + """Retrieves a list of entities in this collection. + + The entire collection is loaded at once and is returned as a list. This + function makes a single roundtrip to the server, plus at most two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + There is no caching--every call makes at least one round trip. + + :param count: The maximum number of entities to return (optional). + :type count: ``integer`` + :param kwargs: Additional arguments (optional): + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). 
+ + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + :return: A ``list`` of entities. + """ + # response = self.get(count=count, **kwargs) + # return self._load_list(response) + return list(self.iter(count=count, **kwargs)) + + + + +class Collection(ReadOnlyCollection): + """A collection of entities. + + Splunk provides a number of different collections of distinct + entity types: applications, saved searches, fired alerts, and a + number of others. Each particular type is available separately + from the Splunk instance, and the entities of that type are + returned in a :class:`Collection`. + + The interface for :class:`Collection` does not quite match either + ``list`` or ``dict`` in Python, because there are enough semantic + mismatches with either to make its behavior surprising. A unique + element in a :class:`Collection` is defined by a string giving its + name plus namespace (although the namespace is optional if the name is + unique). + + **Example**:: + + import splunklib.client as client + service = client.connect(...) + mycollection = service.saved_searches + mysearch = mycollection['my_search', client.namespace(owner='boris', app='natasha', sharing='user')] + # Or if there is only one search visible named 'my_search' + mysearch = mycollection['my_search'] + + Similarly, ``name`` in ``mycollection`` works as you might expect (though + you cannot currently pass a namespace to the ``in`` operator), as does + ``len(mycollection)``. + + However, as an aggregate, :class:`Collection` behaves more like a + list. If you iterate over a :class:`Collection`, you get an + iterator over the entities, not the names and namespaces. + + **Example**:: + + for entity in mycollection: + assert isinstance(entity, client.Entity) + + Use the :meth:`create` and :meth:`delete` methods to create and delete + entities in this collection. To view the access control list and other + metadata of the collection, use the :meth:`ReadOnlyCollection.itemmeta` method. + + :class:`Collection` does no caching. Each call makes at least one + round trip to the server to fetch data. + """ + + def create(self, name, **params): + """Creates a new entity in this collection. + + This function makes either one or two roundtrips to the + server, depending on the type of entities in this + collection, plus at most two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param name: The name of the entity to create. + :type name: ``string`` + :param namespace: A namespace, as created by the :func:`splunklib.binding.namespace` + function (optional). You can also set ``owner``, ``app``, and + ``sharing`` in ``params``. + :type namespace: A :class:`splunklib.data.Record` object with keys ``owner``, ``app``, + and ``sharing``. + :param params: Additional entity-specific arguments (optional). + :type params: ``dict`` + :return: The new entity. + :rtype: A subclass of :class:`Entity`, chosen by :meth:`Collection.self.item`. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + applications = s.apps + new_app = applications.create("my_fake_app") + """ + if not isinstance(name, six.string_types): + raise InvalidNameException("%s is not a valid name for an entity." 
% name) + if 'namespace' in params: + namespace = params.pop('namespace') + params['owner'] = namespace.owner + params['app'] = namespace.app + params['sharing'] = namespace.sharing + response = self.post(name=name, **params) + atom = _load_atom(response, XNAME_ENTRY) + if atom is None: + # This endpoint doesn't return the content of the new + # item. We have to go fetch it ourselves. + return self[name] + else: + entry = atom.entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + self._entity_path(state), + state=state) + return entity + + def delete(self, name, **params): + """Deletes a specified entity from the collection. + + :param name: The name of the entity to delete. + :type name: ``string`` + :return: The collection. + :rtype: ``self`` + + This method is implemented for consistency with the REST API's DELETE + method. + + If there is no *name* entity on the server, a ``KeyError`` is + thrown. This function always makes a roundtrip to the server. + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + saved_searches = c.saved_searches + saved_searches.create('my_saved_search', + 'search * | head 1') + assert 'my_saved_search' in saved_searches + saved_searches.delete('my_saved_search') + assert 'my_saved_search' not in saved_searches + """ + name = UrlEncoded(name, encode_slash=True) + if 'namespace' in params: + namespace = params.pop('namespace') + params['owner'] = namespace.owner + params['app'] = namespace.app + params['sharing'] = namespace.sharing + try: + self.service.delete(_path(self.path, name), **params) + except HTTPError as he: + # An HTTPError with status code 404 means that the entity + # has already been deleted, and we reraise it as a + # KeyError. + if he.status == 404: + raise KeyError("No such entity %s" % name) + else: + raise + return self + + def get(self, name="", owner=None, app=None, sharing=None, **query): + """Performs a GET request to the server on the collection. + + If *owner*, *app*, and *sharing* are omitted, this method takes a + default namespace from the :class:`Service` object for this :class:`Endpoint`. + All other keyword arguments are included in the URL as query parameters. + + :raises AuthenticationError: Raised when the ``Service`` is not logged in. + :raises HTTPError: Raised when an error in the request occurs. + :param path_segment: A path segment relative to this endpoint. + :type path_segment: ``string`` + :param owner: The owner context of the namespace (optional). + :type owner: ``string`` + :param app: The app context of the namespace (optional). + :type app: ``string`` + :param sharing: The sharing mode for the namespace (optional). + :type sharing: "global", "system", "app", or "user" + :param query: All other keyword arguments, which are used as query + parameters. + :type query: ``string`` + :return: The response from the server. + :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``, + and ``status`` + + **Example**:: + + import splunklib.client + s = client.service(...) 
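A hedged sketch of Collection.create() and delete() with an explicit namespace, assuming the owner and app shown exist on the instance; all names are placeholders:

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")
    ns = client.namespace(owner="nobody", app="search", sharing="app")

    # create() POSTs the new entity; passing namespace routes the request
    # to that owner/app context, and delete() accepts the same argument.
    saved_searches = service.saved_searches
    saved_searches.create("sketch_search",
                          search="search index=_internal | head 1",
                          namespace=ns)
    saved_searches.delete("sketch_search", namespace=ns)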
+ saved_searches = s.saved_searches + saved_searches.get("my/saved/search") == \\ + {'body': ...a response reader object..., + 'headers': [('content-length', '26208'), + ('expires', 'Fri, 30 Oct 1998 00:00:00 GMT'), + ('server', 'Splunkd'), + ('connection', 'close'), + ('cache-control', 'no-store, max-age=0, must-revalidate, no-cache'), + ('date', 'Fri, 11 May 2012 16:30:35 GMT'), + ('content-type', 'text/xml; charset=utf-8')], + 'reason': 'OK', + 'status': 200} + saved_searches.get('nonexistant/search') # raises HTTPError + s.logout() + saved_searches.get() # raises AuthenticationError + + """ + name = UrlEncoded(name, encode_slash=True) + return super(Collection, self).get(name, owner, app, sharing, **query) + + + + +class ConfigurationFile(Collection): + """This class contains all of the stanzas from one configuration file. + """ + # __init__'s arguments must match those of an Entity, not a + # Collection, since it is being created as the elements of a + # Configurations, which is a Collection subclass. + def __init__(self, service, path, **kwargs): + Collection.__init__(self, service, path, item=Stanza) + self.name = kwargs['state']['title'] + + +class Configurations(Collection): + """This class provides access to the configuration files from this Splunk + instance. Retrieve this collection using :meth:`Service.confs`. + + Splunk's configuration is divided into files, and each file into + stanzas. This collection is unusual in that the values in it are + themselves collections of :class:`ConfigurationFile` objects. + """ + def __init__(self, service): + Collection.__init__(self, service, PATH_PROPERTIES, item=ConfigurationFile) + if self.service.namespace.owner == '-' or self.service.namespace.app == '-': + raise ValueError("Configurations cannot have wildcards in namespace.") + + def __getitem__(self, key): + # The superclass implementation is designed for collections that contain + # entities. This collection (Configurations) contains collections + # (ConfigurationFile). + # + # The configurations endpoint returns multiple entities when we ask for a single file. + # This screws up the default implementation of __getitem__ from Collection, which thinks + # that multiple entities means a name collision, so we have to override it here. + try: + response = self.get(key) + return ConfigurationFile(self.service, PATH_CONF % key, state={'title': key}) + except HTTPError as he: + if he.status == 404: # No entity matching key + raise KeyError(key) + else: + raise + + def __contains__(self, key): + # configs/conf-{name} never returns a 404. We have to post to properties/{name} + # in order to find out if a configuration exists. + try: + response = self.get(key) + return True + except HTTPError as he: + if he.status == 404: # No entity matching key + return False + else: + raise + + def create(self, name): + """ Creates a configuration file named *name*. + + If there is already a configuration file with that name, + the existing file is returned. + + :param name: The name of the configuration file. + :type name: ``string`` + + :return: The :class:`ConfigurationFile` object. + """ + # This has to be overridden to handle the plumbing of creating + # a ConfigurationFile (which is a Collection) instead of some + # Entity. 
+ if not isinstance(name, six.string_types): + raise ValueError("Invalid name: %s" % repr(name)) + response = self.post(__conf=name) + if response.status == 303: + return self[name] + elif response.status == 201: + return ConfigurationFile(self.service, PATH_CONF % name, item=Stanza, state={'title': name}) + else: + raise ValueError("Unexpected status code %s returned from creating a stanza" % response.status) + + def delete(self, key): + """Raises `IllegalOperationException`.""" + raise IllegalOperationException("Cannot delete configuration files from the REST API.") + + def _entity_path(self, state): + # Overridden to make all the ConfigurationFile objects + # returned refer to the configs/ path instead of the + # properties/ path used by Configrations. + return PATH_CONF % state['title'] + + +class Stanza(Entity): + """This class contains a single configuration stanza.""" + + def submit(self, stanza): + """Adds keys to the current configuration stanza as a + dictionary of key-value pairs. + + :param stanza: A dictionary of key-value pairs for the stanza. + :type stanza: ``dict`` + :return: The :class:`Stanza` object. + """ + body = _encode(**stanza) + self.service.post(self.path, body=body) + return self + + def __len__(self): + # The stanza endpoint returns all the keys at the same level in the XML as the eai information + # and 'disabled', so to get an accurate length, we have to filter those out and have just + # the stanza keys. + return len([x for x in self._state.content.keys() + if not x.startswith('eai') and x != 'disabled']) + + +class StoragePassword(Entity): + """This class contains a storage password. + """ + def __init__(self, service, path, **kwargs): + state = kwargs.get('state', None) + kwargs['skip_refresh'] = kwargs.get('skip_refresh', state is not None) + super(StoragePassword, self).__init__(service, path, **kwargs) + self._state = state + + @property + def clear_password(self): + return self.content.get('clear_password') + + @property + def encrypted_password(self): + return self.content.get('encr_password') + + @property + def realm(self): + return self.content.get('realm') + + @property + def username(self): + return self.content.get('username') + + +class StoragePasswords(Collection): + """This class provides access to the storage passwords from this Splunk + instance. Retrieve this collection using :meth:`Service.storage_passwords`. + """ + def __init__(self, service): + if service.namespace.owner == '-' or service.namespace.app == '-': + raise ValueError("StoragePasswords cannot have wildcards in namespace.") + super(StoragePasswords, self).__init__(service, PATH_STORAGE_PASSWORDS, item=StoragePassword) + + def create(self, password, username, realm=None): + """ Creates a storage password. + + A `StoragePassword` can be identified by , or by : if the + optional realm parameter is also provided. + + :param password: The password for the credentials - this is the only part of the credentials that will be stored securely. + :type name: ``string`` + :param username: The username for the credentials. + :type name: ``string`` + :param realm: The credential realm. (optional) + :type name: ``string`` + + :return: The :class:`StoragePassword` object created. 
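An illustrative sketch of the configuration classes above; the conf file, stanza name, and keys are placeholders, and the connection must use a concrete owner/app namespace (no wildcards):

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme",
                             owner="nobody", app="search")

    # service.confs is a Configurations collection; indexing it yields a
    # ConfigurationFile, which is itself a collection of Stanza objects.
    props = service.confs["props"]
    stanza = props.create("sketch_sourcetype")
    stanza.submit({"SHOULD_LINEMERGE": "false", "TIME_PREFIX": "^"})
    stanza.refresh()
    print(len(stanza))   # key count, excluding 'eai:*' and 'disabled'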
+ """ + if not isinstance(username, six.string_types): + raise ValueError("Invalid name: %s" % repr(username)) + + if realm is None: + response = self.post(password=password, name=username) + else: + response = self.post(password=password, realm=realm, name=username) + + if response.status != 201: + raise ValueError("Unexpected status code %s returned from creating a stanza" % response.status) + + entries = _load_atom_entries(response) + state = _parse_atom_entry(entries[0]) + storage_password = StoragePassword(self.service, self._entity_path(state), state=state, skip_refresh=True) + + return storage_password + + def delete(self, username, realm=None): + """Delete a storage password by username and/or realm. + + The identifier can be passed in through the username parameter as + or :, but the preferred way is by + passing in the username and realm parameters. + + :param username: The username for the credentials, or : if the realm parameter is omitted. + :type name: ``string`` + :param realm: The credential realm. (optional) + :type name: ``string`` + :return: The `StoragePassword` collection. + :rtype: ``self`` + """ + if realm is None: + # This case makes the username optional, so + # the full name can be passed in as realm. + # Assume it's already encoded. + name = username + else: + # Encode each component separately + name = UrlEncoded(realm, encode_slash=True) + ":" + UrlEncoded(username, encode_slash=True) + + # Append the : expected at the end of the name + if name[-1] != ":": + name = name + ":" + return Collection.delete(self, name) + + +class AlertGroup(Entity): + """This class represents a group of fired alerts for a saved search. Access + it using the :meth:`alerts` property.""" + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def __len__(self): + return self.count + + @property + def alerts(self): + """Returns a collection of triggered alerts. + + :return: A :class:`Collection` of triggered alerts. + """ + return Collection(self.service, self.path) + + @property + def count(self): + """Returns the count of triggered alerts. + + :return: The triggered alert count. + :rtype: ``integer`` + """ + return int(self.content.get('triggered_alert_count', 0)) + + +class Indexes(Collection): + """This class contains the collection of indexes in this Splunk instance. + Retrieve this collection using :meth:`Service.indexes`. + """ + def get_default(self): + """ Returns the name of the default index. + + :return: The name of the default index. + + """ + index = self['_audit'] + return index['defaultDatabase'] + + def delete(self, name): + """ Deletes a given index. + + **Note**: This method is only supported in Splunk 5.0 and later. + + :param name: The name of the index to delete. + :type name: ``string`` + """ + if self.service.splunk_version >= (5,): + Collection.delete(self, name) + else: + raise IllegalOperationException("Deleting indexes via the REST API is " + "not supported before Splunk version 5.") + + +class Index(Entity): + """This class represents an index and provides different operations, such as + cleaning the index, writing to the index, and so forth.""" + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def attach(self, host=None, source=None, sourcetype=None): + """Opens a stream (a writable socket) for writing events to the index. + + :param host: The host value for events written to the stream. 
+ :type host: ``string`` + :param source: The source value for events written to the stream. + :type source: ``string`` + :param sourcetype: The sourcetype value for events written to the + stream. + :type sourcetype: ``string`` + + :return: A writable socket. + """ + args = { 'index': self.name } + if host is not None: args['host'] = host + if source is not None: args['source'] = source + if sourcetype is not None: args['sourcetype'] = sourcetype + path = UrlEncoded(PATH_RECEIVERS_STREAM + "?" + urllib.parse.urlencode(args), skip_encode=True) + + cookie_or_auth_header = "Authorization: Splunk %s\r\n" % \ + (self.service.token if self.service.token is _NoAuthenticationToken + else self.service.token.replace("Splunk ", "")) + + # If we have cookie(s), use them instead of "Authorization: ..." + if self.service.has_cookies(): + cookie_or_auth_header = "Cookie: %s\r\n" % _make_cookie_header(self.service.get_cookies().items()) + + # Since we need to stream to the index connection, we have to keep + # the connection open and use the Splunk extension headers to note + # the input mode + sock = self.service.connect() + headers = [("POST %s HTTP/1.1\r\n" % str(self.service._abspath(path))).encode('utf-8'), + ("Host: %s:%s\r\n" % (self.service.host, int(self.service.port))).encode('utf-8'), + b"Accept-Encoding: identity\r\n", + cookie_or_auth_header.encode('utf-8'), + b"X-Splunk-Input-Mode: Streaming\r\n", + b"\r\n"] + + for h in headers: + sock.write(h) + return sock + + @contextlib.contextmanager + def attached_socket(self, *args, **kwargs): + """Opens a raw socket in a ``with`` block to write data to Splunk. + + The arguments are identical to those for :meth:`attach`. The socket is + automatically closed at the end of the ``with`` block, even if an + exception is raised in the block. + + :param host: The host value for events written to the stream. + :type host: ``string`` + :param source: The source value for events written to the stream. + :type source: ``string`` + :param sourcetype: The sourcetype value for events written to the + stream. + :type sourcetype: ``string`` + + :returns: Nothing. + + **Example**:: + + import splunklib.client as client + s = client.connect(...) + index = s.indexes['some_index'] + with index.attached_socket(sourcetype='test') as sock: + sock.send('Test event\\r\\n') + + """ + try: + sock = self.attach(*args, **kwargs) + yield sock + finally: + sock.shutdown(socket.SHUT_RDWR) + sock.close() + + def clean(self, timeout=60): + """Deletes the contents of the index. + + This method blocks until the index is empty, because it needs to restore + values at the end of the operation. + + :param timeout: The time-out period for the operation, in seconds (the + default is 60). + :type timeout: ``integer`` + + :return: The :class:`Index`. + """ + self.refresh() + + tds = self['maxTotalDataSizeMB'] + ftp = self['frozenTimePeriodInSecs'] + was_disabled_initially = self.disabled + try: + if (not was_disabled_initially and \ + self.service.splunk_version < (5,)): + # Need to disable the index first on Splunk 4.x, + # but it doesn't work to disable it on 5.0. + self.disable() + self.update(maxTotalDataSizeMB=1, frozenTimePeriodInSecs=1) + self.roll_hot_buckets() + + # Wait until event count goes to 0. 
+ start = datetime.now() + diff = timedelta(seconds=timeout) + while self.content.totalEventCount != '0' and datetime.now() < start+diff: + sleep(1) + self.refresh() + + if self.content.totalEventCount != '0': + raise OperationError("Cleaning index %s took longer than %s seconds; timing out." % (self.name, timeout)) + finally: + # Restore original values + self.update(maxTotalDataSizeMB=tds, frozenTimePeriodInSecs=ftp) + if (not was_disabled_initially and \ + self.service.splunk_version < (5,)): + # Re-enable the index if it was originally enabled and we messed with it. + self.enable() + + return self + + def roll_hot_buckets(self): + """Performs rolling hot buckets for this index. + + :return: The :class:`Index`. + """ + self.post("roll-hot-buckets") + return self + + def submit(self, event, host=None, source=None, sourcetype=None): + """Submits a single event to the index using ``HTTP POST``. + + :param event: The event to submit. + :type event: ``string`` + :param `host`: The host value of the event. + :type host: ``string`` + :param `source`: The source value of the event. + :type source: ``string`` + :param `sourcetype`: The sourcetype value of the event. + :type sourcetype: ``string`` + + :return: The :class:`Index`. + """ + args = { 'index': self.name } + if host is not None: args['host'] = host + if source is not None: args['source'] = source + if sourcetype is not None: args['sourcetype'] = sourcetype + + # The reason we use service.request directly rather than POST + # is that we are not sending a POST request encoded using + # x-www-form-urlencoded (as we do not have a key=value body), + # because we aren't really sending a "form". + self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) + return self + + # kwargs: host, host_regex, host_segment, rename-source, sourcetype + def upload(self, filename, **kwargs): + """Uploads a file for immediate indexing. + + **Note**: The file must be locally accessible from the server. + + :param filename: The name of the file to upload. The file can be a + plain, compressed, or archived file. + :type filename: ``string`` + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Index parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`Index`. + """ + kwargs['index'] = self.name + path = 'data/inputs/oneshot' + self.service.post(path, name=filename, **kwargs) + return self + + +class Input(Entity): + """This class represents a Splunk input. This class is the base for all + typed input classes and is also used when the client does not recognize an + input kind. + """ + def __init__(self, service, path, kind=None, **kwargs): + # kind can be omitted (in which case it is inferred from the path) + # Otherwise, valid values are the paths from data/inputs ("udp", + # "monitor", "tcp/raw"), or two special cases: "tcp" (which is "tcp/raw") + # and "splunktcp" (which is "tcp/cooked"). + Entity.__init__(self, service, path, **kwargs) + if kind is None: + path_segments = path.split('/') + i = path_segments.index('inputs') + 1 + if path_segments[i] == 'tcp': + self.kind = path_segments[i] + '/' + path_segments[i+1] + else: + self.kind = path_segments[i] + else: + self.kind = kind + + # Handle old input kind names. + if self.kind == 'tcp': + self.kind = 'tcp/raw' + if self.kind == 'splunktcp': + self.kind = 'tcp/cooked' + + def update(self, **kwargs): + """Updates the server with any changes you've made to the current input + along with any additional arguments you specify. 
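A sketch of writing to an index with the methods above; the index name, file path, and sourcetypes are placeholders, and upload() only works if the file is readable by splunkd on the server:

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    index = service.indexes["main"]

    # submit() POSTs a single event through the simple receiver endpoint...
    index.submit("sketch event one", sourcetype="sketch", host="editor-host")

    # ...while upload() asks splunkd to index a file it can read locally.
    index.upload("/var/log/syslog", sourcetype="syslog")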
+ + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Input parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The input this method was called on. + :rtype: class:`Input` + """ + # UDP and TCP inputs require special handling due to their restrictToHost + # field. For all other inputs kinds, we can dispatch to the superclass method. + if self.kind not in ['tcp', 'splunktcp', 'tcp/raw', 'tcp/cooked', 'udp']: + return super(Input, self).update(**kwargs) + else: + # The behavior of restrictToHost is inconsistent across input kinds and versions of Splunk. + # In Splunk 4.x, the name of the entity is only the port, independent of the value of + # restrictToHost. In Splunk 5.0 this changed so the name will be of the form :. + # In 5.0 and 5.0.1, if you don't supply the restrictToHost value on every update, it will + # remove the host restriction from the input. As of 5.0.2 you simply can't change restrictToHost + # on an existing input. + + # The logic to handle all these cases: + # - Throw an exception if the user tries to set restrictToHost on an existing input + # for *any* version of Splunk. + # - Set the existing restrictToHost value on the update args internally so we don't + # cause it to change in Splunk 5.0 and 5.0.1. + to_update = kwargs.copy() + + if 'restrictToHost' in kwargs: + raise IllegalOperationException("Cannot set restrictToHost on an existing input with the SDK.") + elif 'restrictToHost' in self._state.content and self.kind != 'udp': + to_update['restrictToHost'] = self._state.content['restrictToHost'] + + # Do the actual update operation. + return super(Input, self).update(**to_update) + + +# Inputs is a "kinded" collection, which is a heterogenous collection where +# each item is tagged with a kind, that provides a single merged view of all +# input kinds. +class Inputs(Collection): + """This class represents a collection of inputs. The collection is + heterogeneous and each member of the collection contains a *kind* property + that indicates the specific type of input. + Retrieve this collection using :meth:`Service.inputs`.""" + + def __init__(self, service, kindmap=None): + Collection.__init__(self, service, PATH_INPUTS, item=Input) + + def __getitem__(self, key): + # The key needed to retrieve the input needs it's parenthesis to be URL encoded + # based on the REST API for input + # + if isinstance(key, tuple) and len(key) == 2: + # Fetch a single kind + key, kind = key + key = UrlEncoded(key, encode_slash=True) + try: + response = self.get(self.kindpath(kind) + "/" + key) + entries = self._load_list(response) + if len(entries) > 1: + raise AmbiguousReferenceException("Found multiple inputs of kind %s named %s." % (kind, key)) + elif len(entries) == 0: + raise KeyError((key, kind)) + else: + return entries[0] + except HTTPError as he: + if he.status == 404: # No entity matching kind and key + raise KeyError((key, kind)) + else: + raise + else: + # Iterate over all the kinds looking for matches. + kind = None + candidate = None + key = UrlEncoded(key, encode_slash=True) + for kind in self.kinds: + try: + response = self.get(kind + "/" + key) + entries = self._load_list(response) + if len(entries) > 1: + raise AmbiguousReferenceException("Found multiple inputs of kind %s named %s." 
% (kind, key)) + elif len(entries) == 0: + pass + else: + if candidate is not None: # Already found at least one candidate + raise AmbiguousReferenceException("Found multiple inputs named %s, please specify a kind" % key) + candidate = entries[0] + except HTTPError as he: + if he.status == 404: + pass # Just carry on to the next kind. + else: + raise + if candidate is None: + raise KeyError(key) # Never found a match. + else: + return candidate + + def __contains__(self, key): + if isinstance(key, tuple) and len(key) == 2: + # If we specify a kind, this will shortcut properly + try: + self.__getitem__(key) + return True + except KeyError: + return False + else: + # Without a kind, we want to minimize the number of round trips to the server, so we + # reimplement some of the behavior of __getitem__ in order to be able to stop searching + # on the first hit. + for kind in self.kinds: + try: + response = self.get(self.kindpath(kind) + "/" + key) + entries = self._load_list(response) + if len(entries) > 0: + return True + else: + pass + except HTTPError as he: + if he.status == 404: + pass # Just carry on to the next kind. + else: + raise + return False + + def create(self, name, kind, **kwargs): + """Creates an input of a specific kind in this collection, with any + arguments you specify. + + :param `name`: The input name. + :type name: ``string`` + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + :param `kwargs`: Additional arguments (optional). For more about the + available parameters, see `Input parameters `_ on Splunk Developer Portal. + + :type kwargs: ``dict`` + + :return: The new :class:`Input`. + """ + kindpath = self.kindpath(kind) + self.post(kindpath, name=name, **kwargs) + + # If we created an input with restrictToHost set, then + # its path will be :, not just , + # and we have to adjust accordingly. + + # Url encodes the name of the entity. + name = UrlEncoded(name, encode_slash=True) + path = _path( + self.path + kindpath, + '%s:%s' % (kwargs['restrictToHost'], name) \ + if 'restrictToHost' in kwargs else name + ) + return Input(self.service, path, kind) + + def delete(self, name, kind=None): + """Removes an input from the collection. + + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + :param name: The name of the input to remove. + :type name: ``string`` + + :return: The :class:`Inputs` collection. + """ + if kind is None: + self.service.delete(self[name].path) + else: + self.service.delete(self[name, kind].path) + return self + + def itemmeta(self, kind): + """Returns metadata for the members of a given kind. 
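A sketch of the kinded Inputs collection above; the port, sourcetype, and file path are placeholders:

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    inputs = service.inputs

    # "tcp" is mapped to the tcp/raw endpoint by kindpath().
    inputs.create("10999", "tcp", sourcetype="sketch_data")

    # Lookups can be disambiguated with a (name, kind) tuple.
    tcp_input = inputs["10999", "tcp"]
    assert tcp_input.kind == "tcp/raw"

    inputs.delete("10999", kind="tcp")

    # One-time indexing of a server-local file (see Inputs.oneshot below).
    inputs.oneshot("/var/log/syslog", sourcetype="syslog")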
+ + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + + :return: The metadata. + :rtype: class:``splunklib.data.Record`` + """ + response = self.get("%s/_new" % self._kindmap[kind]) + content = _load_atom(response, MATCH_ENTRY_CONTENT) + return _parse_atom_metadata(content) + + def _get_kind_list(self, subpath=None): + if subpath is None: + subpath = [] + + kinds = [] + response = self.get('/'.join(subpath)) + content = _load_atom_entries(response) + for entry in content: + this_subpath = subpath + [entry.title] + # The "all" endpoint doesn't work yet. + # The "tcp/ssl" endpoint is not a real input collection. + if entry.title == 'all' or this_subpath == ['tcp','ssl']: + continue + elif 'create' in [x.rel for x in entry.link]: + path = '/'.join(subpath + [entry.title]) + kinds.append(path) + else: + subkinds = self._get_kind_list(subpath + [entry.title]) + kinds.extend(subkinds) + return kinds + + @property + def kinds(self): + """Returns the input kinds on this Splunk instance. + + :return: The list of input kinds. + :rtype: ``list`` + """ + return self._get_kind_list() + + def kindpath(self, kind): + """Returns a path to the resources for a given input kind. + + :param `kind`: The kind of input: + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kind: ``string`` + + :return: The relative endpoint path. + :rtype: ``string`` + """ + if kind == 'tcp': + return UrlEncoded('tcp/raw', skip_encode=True) + elif kind == 'splunktcp': + return UrlEncoded('tcp/cooked', skip_encode=True) + else: + return UrlEncoded(kind, skip_encode=True) + + def list(self, *kinds, **kwargs): + """Returns a list of inputs that are in the :class:`Inputs` collection. + You can also filter by one or more input kinds. + + This function iterates over all possible inputs, regardless of any arguments you + specify. Because the :class:`Inputs` collection is the union of all the inputs of each + kind, this method implements parameters such as "count", "search", and so + on at the Python level once all the data has been fetched. The exception + is when you specify a single input kind, and then this method makes a single request + with the usual semantics for parameters. + + :param kinds: The input kinds to return (optional). + + - "ad": Active Directory + + - "monitor": Files and directories + + - "registry": Windows Registry + + - "script": Scripts + + - "splunktcp": TCP, processed + + - "tcp": TCP, unprocessed + + - "udp": UDP + + - "win-event-log-collections": Windows event log + + - "win-perfmon": Performance monitoring + + - "win-wmi-collections": WMI + + :type kinds: ``string`` + :param kwargs: Additional arguments (optional): + + - "count" (``integer``): The maximum number of items to return. + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. 
+ + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". + + :type kwargs: ``dict`` + + :return: A list of input kinds. + :rtype: ``list`` + """ + if len(kinds) == 0: + kinds = self.kinds + if len(kinds) == 1: + kind = kinds[0] + logging.debug("Inputs.list taking short circuit branch for single kind.") + path = self.kindpath(kind) + logging.debug("Path for inputs: %s", path) + try: + path = UrlEncoded(path, skip_encode=True) + response = self.get(path, **kwargs) + except HTTPError as he: + if he.status == 404: # No inputs of this kind + return [] + entities = [] + entries = _load_atom_entries(response) + if entries is None: + return [] # No inputs in a collection comes back with no feed or entry in the XML + for entry in entries: + state = _parse_atom_entry(entry) + # Unquote the URL, since all URL encoded in the SDK + # should be of type UrlEncoded, and all str should not + # be URL encoded. + path = urllib.parse.unquote(state.links.alternate) + entity = Input(self.service, path, kind, state=state) + entities.append(entity) + return entities + + search = kwargs.get('search', '*') + + entities = [] + for kind in kinds: + response = None + try: + kind = UrlEncoded(kind, skip_encode=True) + response = self.get(self.kindpath(kind), search=search) + except HTTPError as e: + if e.status == 404: + continue # No inputs of this kind + else: + raise + + entries = _load_atom_entries(response) + if entries is None: continue # No inputs to process + for entry in entries: + state = _parse_atom_entry(entry) + # Unquote the URL, since all URL encoded in the SDK + # should be of type UrlEncoded, and all str should not + # be URL encoded. + path = urllib.parse.unquote(state.links.alternate) + entity = Input(self.service, path, kind, state=state) + entities.append(entity) + if 'offset' in kwargs: + entities = entities[kwargs['offset']:] + if 'count' in kwargs: + entities = entities[:kwargs['count']] + if kwargs.get('sort_mode', None) == 'alpha': + sort_field = kwargs.get('sort_field', 'name') + if sort_field == 'name': + f = lambda x: x.name.lower() + else: + f = lambda x: x[sort_field].lower() + entities = sorted(entities, key=f) + if kwargs.get('sort_mode', None) == 'alpha_case': + sort_field = kwargs.get('sort_field', 'name') + if sort_field == 'name': + f = lambda x: x.name + else: + f = lambda x: x[sort_field] + entities = sorted(entities, key=f) + if kwargs.get('sort_dir', 'asc') == 'desc': + entities = list(reversed(entities)) + return entities + + def __iter__(self, **kwargs): + for item in self.iter(**kwargs): + yield item + + def iter(self, **kwargs): + """ Iterates over the collection of inputs. + + :param kwargs: Additional arguments (optional): + + - "count" (``integer``): The maximum number of items to return. + + - "offset" (``integer``): The offset of the first item to return. + + - "search" (``string``): The search query to filter responses. + + - "sort_dir" (``string``): The direction to sort returned items: + "asc" or "desc". + + - "sort_key" (``string``): The field to use for sorting (optional). + + - "sort_mode" (``string``): The collating sequence for sorting + returned items: "auto", "alpha", "alpha_case", or "num". 
+ + :type kwargs: ``dict`` + """ + for item in self.list(**kwargs): + yield item + + def oneshot(self, path, **kwargs): + """ Creates a oneshot data input, which is an upload of a single file + for one-time indexing. + + :param path: The path and filename. + :type path: ``string`` + :param kwargs: Additional arguments (optional). For more about the + available parameters, see `Input parameters `_ on Splunk Developer Portal. + :type kwargs: ``dict`` + """ + self.post('oneshot', name=path, **kwargs) + + +class Job(Entity): + """This class represents a search job.""" + def __init__(self, service, sid, **kwargs): + path = PATH_JOBS + sid + Entity.__init__(self, service, path, skip_refresh=True, **kwargs) + self.sid = sid + + # The Job entry record is returned at the root of the response + def _load_atom_entry(self, response): + return _load_atom(response).entry + + def cancel(self): + """Stops the current search and deletes the results cache. + + :return: The :class:`Job`. + """ + try: + self.post("control", action="cancel") + except HTTPError as he: + if he.status == 404: + # The job has already been cancelled, so + # cancelling it twice is a nop. + pass + else: + raise + return self + + def disable_preview(self): + """Disables preview for this job. + + :return: The :class:`Job`. + """ + self.post("control", action="disablepreview") + return self + + def enable_preview(self): + """Enables preview for this job. + + **Note**: Enabling preview might slow search considerably. + + :return: The :class:`Job`. + """ + self.post("control", action="enablepreview") + return self + + def events(self, **kwargs): + """Returns a streaming handle to this job's events. + + :param kwargs: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/events + `_ + in the REST API documentation. + :type kwargs: ``dict`` + + :return: The ``InputStream`` IO handle to this job's events. + """ + kwargs['segmentation'] = kwargs.get('segmentation', 'none') + return self.get("events", **kwargs).body + + def finalize(self): + """Stops the job and provides intermediate results for retrieval. + + :return: The :class:`Job`. + """ + self.post("control", action="finalize") + return self + + def is_done(self): + """Indicates whether this job finished running. + + :return: ``True`` if the job is done, ``False`` if not. + :rtype: ``boolean`` + """ + if not self.is_ready(): + return False + done = (self._state.content['isDone'] == '1') + return done + + def is_ready(self): + """Indicates whether this job is ready for querying. + + :return: ``True`` if the job is ready, ``False`` if not. + :rtype: ``boolean`` + + """ + response = self.get() + if response.status == 204: + return False + self._state = self.read(response) + ready = self._state.content['dispatchState'] not in ['QUEUED', 'PARSING'] + return ready + + @property + def name(self): + """Returns the name of the search job, which is the search ID (SID). + + :return: The search ID. + :rtype: ``string`` + """ + return self.sid + + def pause(self): + """Suspends the current search. + + :return: The :class:`Job`. + """ + self.post("control", action="pause") + return self + + def results(self, **query_params): + """Returns a streaming handle to this job's search results. To get a + nice, Pythonic iterator, pass the handle to :class:`splunklib.results.ResultsReader`, + as in:: + + import splunklib.client as client + import splunklib.results as results + from time import sleep + service = client.connect(...) 
+ job = service.jobs.create("search * | head 5") + while not job.is_done(): + sleep(.2) + rr = results.ResultsReader(job.results()) + for result in rr: + if isinstance(result, results.Message): + # Diagnostic messages may be returned in the results + print '%s: %s' % (result.type, result.message) + elif isinstance(result, dict): + # Normal events are returned as dicts + print result + assert rr.is_preview == False + + Results are not available until the job has finished. If called on + an unfinished job, the result is an empty event set. + + This method makes a single roundtrip + to the server, plus at most two additional round trips if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param query_params: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/results + `_. + :type query_params: ``dict`` + + :return: The ``InputStream`` IO handle to this job's results. + """ + query_params['segmentation'] = query_params.get('segmentation', 'none') + return self.get("results", **query_params).body + + def preview(self, **query_params): + """Returns a streaming handle to this job's preview search results. + + Unlike :class:`splunklib.results.ResultsReader`, which requires a job to + be finished to + return any results, the ``preview`` method returns any results that have + been generated so far, whether the job is running or not. The + returned search results are the raw data from the server. Pass + the handle returned to :class:`splunklib.results.ResultsReader` to get a + nice, Pythonic iterator over objects, as in:: + + import splunklib.client as client + import splunklib.results as results + service = client.connect(...) + job = service.jobs.create("search * | head 5") + rr = results.ResultsReader(job.preview()) + for result in rr: + if isinstance(result, results.Message): + # Diagnostic messages may be returned in the results + print '%s: %s' % (result.type, result.message) + elif isinstance(result, dict): + # Normal events are returned as dicts + print result + if rr.is_preview: + print "Preview of a running search job." + else: + print "Job is finished. Results are final." + + This method makes one roundtrip to the server, plus at most + two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param query_params: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/results_preview + `_ + in the REST API documentation. + :type query_params: ``dict`` + + :return: The ``InputStream`` IO handle to this job's preview results. + """ + query_params['segmentation'] = query_params.get('segmentation', 'none') + return self.get("results_preview", **query_params).body + + def searchlog(self, **kwargs): + """Returns a streaming handle to this job's search log. + + :param `kwargs`: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/search.log + `_ + in the REST API documentation. + :type kwargs: ``dict`` + + :return: The ``InputStream`` IO handle to this job's search log. + """ + return self.get("search.log", **kwargs).body + + def set_priority(self, value): + """Sets this job's search priority in the range of 0-10. + + Higher numbers indicate higher priority. Unless splunkd is + running as *root*, you can only decrease the priority of a running job. + + :param `value`: The search priority. + :type value: ``integer`` + + :return: The :class:`Job`. 
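A sketch of polling a job and reading its final results as JSON rather than the Atom/XML stream used in the examples above; the query, count, and output_mode values are illustrative:

    import json
    from time import sleep

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    job = service.jobs.create("search index=_internal | head 5")
    while not job.is_done():
        sleep(0.2)

    # results() returns a stream; output_mode=json lets the standard
    # library parse it instead of results.ResultsReader.
    payload = json.loads(job.results(output_mode="json", count=0).read())
    for row in payload.get("results", []):
        print(row)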
+ """ + self.post('control', action="setpriority", priority=value) + return self + + def summary(self, **kwargs): + """Returns a streaming handle to this job's summary. + + :param `kwargs`: Additional parameters (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/summary + `_ + in the REST API documentation. + :type kwargs: ``dict`` + + :return: The ``InputStream`` IO handle to this job's summary. + """ + return self.get("summary", **kwargs).body + + def timeline(self, **kwargs): + """Returns a streaming handle to this job's timeline results. + + :param `kwargs`: Additional timeline arguments (optional). For a list of valid + parameters, see `GET search/jobs/{search_id}/timeline + `_ + in the REST API documentation. + :type kwargs: ``dict`` + + :return: The ``InputStream`` IO handle to this job's timeline. + """ + return self.get("timeline", **kwargs).body + + def touch(self): + """Extends the expiration time of the search to the current time (now) plus + the time-to-live (ttl) value. + + :return: The :class:`Job`. + """ + self.post("control", action="touch") + return self + + def set_ttl(self, value): + """Set the job's time-to-live (ttl) value, which is the time before the + search job expires and is still available. + + :param `value`: The ttl value, in seconds. + :type value: ``integer`` + + :return: The :class:`Job`. + """ + self.post("control", action="setttl", ttl=value) + return self + + def unpause(self): + """Resumes the current search, if paused. + + :return: The :class:`Job`. + """ + self.post("control", action="unpause") + return self + + +class Jobs(Collection): + """This class represents a collection of search jobs. Retrieve this + collection using :meth:`Service.jobs`.""" + def __init__(self, service): + Collection.__init__(self, service, PATH_JOBS, item=Job) + # The count value to say list all the contents of this + # Collection is 0, not -1 as it is on most. + self.null_count = 0 + + def _load_list(self, response): + # Overridden because Job takes a sid instead of a path. + entries = _load_atom_entries(response) + if entries is None: return [] + entities = [] + for entry in entries: + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + entry['content']['sid'], + state=state) + entities.append(entity) + return entities + + def create(self, query, **kwargs): + """ Creates a search using a search query and any additional parameters + you provide. + + :param query: The search query. + :type query: ``string`` + :param kwargs: Additiona parameters (optional). For a list of available + parameters, see `Search job parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`Job`. + """ + if kwargs.get("exec_mode", None) == "oneshot": + raise TypeError("Cannot specify exec_mode=oneshot; use the oneshot method instead.") + response = self.post(search=query, **kwargs) + sid = _load_sid(response) + return Job(self.service, sid) + + def export(self, query, **params): + """Runs a search and immediately starts streaming preview events. + This method returns a streaming handle to this job's events as an XML + document from the server. To parse this stream into usable Python objects, + pass the handle to :class:`splunklib.results.ResultsReader`:: + + import splunklib.client as client + import splunklib.results as results + service = client.connect(...) 
+ rr = results.ResultsReader(service.jobs.export("search * | head 5")) + for result in rr: + if isinstance(result, results.Message): + # Diagnostic messages may be returned in the results + print '%s: %s' % (result.type, result.message) + elif isinstance(result, dict): + # Normal events are returned as dicts + print result + assert rr.is_preview == False + + Running an export search is more efficient as it streams the results + directly to you, rather than having to write them out to disk and make + them available later. As soon as results are ready, you will receive + them. + + The ``export`` method makes a single roundtrip to the server (as opposed + to two for :meth:`create` followed by :meth:`preview`), plus at most two + more if the ``autologin`` field of :func:`connect` is set to ``True``. + + :raises `ValueError`: Raised for invalid queries. + :param query: The search query. + :type query: ``string`` + :param params: Additional arguments (optional). For a list of valid + parameters, see `GET search/jobs/export + `_ + in the REST API documentation. + :type params: ``dict`` + + :return: The ``InputStream`` IO handle to raw XML returned from the server. + """ + if "exec_mode" in params: + raise TypeError("Cannot specify an exec_mode to export.") + params['segmentation'] = params.get('segmentation', 'none') + return self.post(path_segment="export", + search=query, + **params).body + + def itemmeta(self): + """There is no metadata available for class:``Jobs``. + + Any call to this method raises a class:``NotSupportedError``. + + :raises: class:``NotSupportedError`` + """ + raise NotSupportedError() + + def oneshot(self, query, **params): + """Run a oneshot search and returns a streaming handle to the results. + + The ``InputStream`` object streams XML fragments from the server. To + parse this stream into usable Python objects, + pass the handle to :class:`splunklib.results.ResultsReader`:: + + import splunklib.client as client + import splunklib.results as results + service = client.connect(...) + rr = results.ResultsReader(service.jobs.oneshot("search * | head 5")) + for result in rr: + if isinstance(result, results.Message): + # Diagnostic messages may be returned in the results + print '%s: %s' % (result.type, result.message) + elif isinstance(result, dict): + # Normal events are returned as dicts + print result + assert rr.is_preview == False + + The ``oneshot`` method makes a single roundtrip to the server (as opposed + to two for :meth:`create` followed by :meth:`results`), plus at most two more + if the ``autologin`` field of :func:`connect` is set to ``True``. + + :raises ValueError: Raised for invalid queries. + + :param query: The search query. + :type query: ``string`` + :param params: Additional arguments (optional): + + - "output_mode": Specifies the output format of the results (XML, + JSON, or CSV). + + - "earliest_time": Specifies the earliest time in the time range to + search. The time string can be a UTC time (with fractional seconds), + a relative time specifier (to now), or a formatted time string. + + - "latest_time": Specifies the latest time in the time range to + search. The time string can be a UTC time (with fractional seconds), + a relative time specifier (to now), or a formatted time string. + + - "rf": Specifies one or more fields to add to the search. + + :type params: ``dict`` + + :return: The ``InputStream`` IO handle to raw XML returned from the server. 
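A sketch of Jobs.oneshot() with JSON output, as an alternative to the ResultsReader example above; the query is illustrative:

    import json

    import splunklib.client as client

    service = client.connect(host="localhost", port=8089,
                             username="admin", password="changeme")

    # oneshot() blocks until the search completes and returns the whole
    # result set in a single response body.
    body = service.jobs.oneshot("search index=_internal | head 3",
                                output_mode="json")
    for row in json.loads(body.read()).get("results", []):
        print(row)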
+ """ + if "exec_mode" in params: + raise TypeError("Cannot specify an exec_mode to oneshot.") + params['segmentation'] = params.get('segmentation', 'none') + return self.post(search=query, + exec_mode="oneshot", + **params).body + + +class Loggers(Collection): + """This class represents a collection of service logging categories. + Retrieve this collection using :meth:`Service.loggers`.""" + def __init__(self, service): + Collection.__init__(self, service, PATH_LOGGER) + + def itemmeta(self): + """There is no metadata available for class:``Loggers``. + + Any call to this method raises a class:``NotSupportedError``. + + :raises: class:``NotSupportedError`` + """ + raise NotSupportedError() + + +class Message(Entity): + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + @property + def value(self): + """Returns the message value. + + :return: The message value. + :rtype: ``string`` + """ + return self[self.name] + + +class ModularInputKind(Entity): + """This class contains the different types of modular inputs. Retrieve this + collection using :meth:`Service.modular_input_kinds`. + """ + def __contains__(self, name): + args = self.state.content['endpoints']['args'] + if name in args: + return True + else: + return Entity.__contains__(self, name) + + def __getitem__(self, name): + args = self.state.content['endpoint']['args'] + if name in args: + return args['item'] + else: + return Entity.__getitem__(self, name) + + @property + def arguments(self): + """A dictionary of all the arguments supported by this modular input kind. + + The keys in the dictionary are the names of the arguments. The values are + another dictionary giving the metadata about that argument. The possible + keys in that dictionary are ``"title"``, ``"description"``, ``"required_on_create``", + ``"required_on_edit"``, ``"data_type"``. Each value is a string. It should be one + of ``"true"`` or ``"false"`` for ``"required_on_create"`` and ``"required_on_edit"``, + and one of ``"boolean"``, ``"string"``, or ``"number``" for ``"data_type"``. + + :return: A dictionary describing the arguments this modular input kind takes. + :rtype: ``dict`` + """ + return self.state.content['endpoint']['args'] + + def update(self, **kwargs): + """Raises an error. Modular input kinds are read only.""" + raise IllegalOperationException("Modular input kinds cannot be updated via the REST API.") + + +class SavedSearch(Entity): + """This class represents a saved search.""" + def __init__(self, service, path, **kwargs): + Entity.__init__(self, service, path, **kwargs) + + def acknowledge(self): + """Acknowledges the suppression of alerts from this saved search and + resumes alerting. + + :return: The :class:`SavedSearch`. + """ + self.post("acknowledge") + return self + + @property + def alert_count(self): + """Returns the number of alerts fired by this saved search. + + :return: The number of alerts fired by this saved search. + :rtype: ``integer`` + """ + return int(self._state.content.get('triggered_alert_count', 0)) + + def dispatch(self, **kwargs): + """Runs the saved search and returns the resulting search job. + + :param `kwargs`: Additional dispatch arguments (optional). For details, + see the `POST saved/searches/{name}/dispatch + `_ + endpoint in the REST API documentation. + :type kwargs: ``dict`` + :return: The :class:`Job`. 
+ """ + response = self.post("dispatch", **kwargs) + sid = _load_sid(response) + return Job(self.service, sid) + + @property + def fired_alerts(self): + """Returns the collection of fired alerts (a fired alert group) + corresponding to this saved search's alerts. + + :raises IllegalOperationException: Raised when the search is not scheduled. + + :return: A collection of fired alerts. + :rtype: :class:`AlertGroup` + """ + if self['is_scheduled'] == '0': + raise IllegalOperationException('Unscheduled saved searches have no alerts.') + c = Collection( + self.service, + self.service._abspath(PATH_FIRED_ALERTS + self.name, + owner=self._state.access.owner, + app=self._state.access.app, + sharing=self._state.access.sharing), + item=AlertGroup) + return c + + def history(self): + """Returns a list of search jobs corresponding to this saved search. + + :return: A list of :class:`Job` objects. + """ + response = self.get("history") + entries = _load_atom_entries(response) + if entries is None: return [] + jobs = [] + for entry in entries: + job = Job(self.service, entry.title) + jobs.append(job) + return jobs + + def update(self, search=None, **kwargs): + """Updates the server with any changes you've made to the current saved + search along with any additional arguments you specify. + + :param `search`: The search query (optional). + :type search: ``string`` + :param `kwargs`: Additional arguments (optional). For a list of available + parameters, see `Saved search parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + + :return: The :class:`SavedSearch`. + """ + # Updates to a saved search *require* that the search string be + # passed, so we pass the current search string if a value wasn't + # provided by the caller. + if search is None: search = self.content.search + Entity.update(self, search=search, **kwargs) + return self + + def scheduled_times(self, earliest_time='now', latest_time='+1h'): + """Returns the times when this search is scheduled to run. + + By default this method returns the times in the next hour. For different + time ranges, set *earliest_time* and *latest_time*. For example, + for all times in the last day use "earliest_time=-1d" and + "latest_time=now". + + :param earliest_time: The earliest time. + :type earliest_time: ``string`` + :param latest_time: The latest time. + :type latest_time: ``string`` + + :return: The list of search times. + """ + response = self.get("scheduled_times", + earliest_time=earliest_time, + latest_time=latest_time) + data = self._load_atom_entry(response) + rec = _parse_atom_entry(data) + times = [datetime.fromtimestamp(int(t)) + for t in rec.content.scheduled_times] + return times + + def suppress(self, expiration): + """Skips any scheduled runs of this search in the next *expiration* + number of seconds. + + :param expiration: The expiration period, in seconds. + :type expiration: ``integer`` + + :return: The :class:`SavedSearch`. + """ + self.post("suppress", expiration=expiration) + return self + + @property + def suppressed(self): + """Returns the number of seconds that this search is blocked from running + (possibly 0). + + :return: The number of seconds. + :rtype: ``integer`` + """ + r = self._run_action("suppress") + if r.suppressed == "1": + return int(r.expiration) + else: + return 0 + + def unsuppress(self): + """Cancels suppression and makes this search run as scheduled. + + :return: The :class:`SavedSearch`. 
+ """ + self.post("suppress", expiration="0") + return self + + +class SavedSearches(Collection): + """This class represents a collection of saved searches. Retrieve this + collection using :meth:`Service.saved_searches`.""" + def __init__(self, service): + Collection.__init__( + self, service, PATH_SAVED_SEARCHES, item=SavedSearch) + + def create(self, name, search, **kwargs): + """ Creates a saved search. + + :param name: The name for the saved search. + :type name: ``string`` + :param search: The search query. + :type search: ``string`` + :param kwargs: Additional arguments (optional). For a list of available + parameters, see `Saved search parameters + `_ + on Splunk Developer Portal. + :type kwargs: ``dict`` + :return: The :class:`SavedSearches` collection. + """ + return Collection.create(self, name, search=search, **kwargs) + + +class Settings(Entity): + """This class represents configuration settings for a Splunk service. + Retrieve this collection using :meth:`Service.settings`.""" + def __init__(self, service, **kwargs): + Entity.__init__(self, service, "/services/server/settings", **kwargs) + + # Updates on the settings endpoint are POSTed to server/settings/settings. + def update(self, **kwargs): + """Updates the settings on the server using the arguments you provide. + + :param kwargs: Additional arguments. For a list of valid arguments, see + `POST server/settings/{name} + `_ + in the REST API documentation. + :type kwargs: ``dict`` + :return: The :class:`Settings` collection. + """ + self.service.post("/services/server/settings/settings", **kwargs) + return self + + +class User(Entity): + """This class represents a Splunk user. + """ + @property + def role_entities(self): + """Returns a list of roles assigned to this user. + + :return: The list of roles. + :rtype: ``list`` + """ + return [self.service.roles[name] for name in self.content.roles] + + +# Splunk automatically lowercases new user names so we need to match that +# behavior here to ensure that the subsequent member lookup works correctly. +class Users(Collection): + """This class represents the collection of Splunk users for this instance of + Splunk. Retrieve this collection using :meth:`Service.users`. + """ + def __init__(self, service): + Collection.__init__(self, service, PATH_USERS, item=User) + + def __getitem__(self, key): + return Collection.__getitem__(self, key.lower()) + + def __contains__(self, name): + return Collection.__contains__(self, name.lower()) + + def create(self, username, password, roles, **params): + """Creates a new user. + + This function makes two roundtrips to the server, plus at most + two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param username: The username. + :type username: ``string`` + :param password: The password. + :type password: ``string`` + :param roles: A single role or list of roles for the user. + :type roles: ``string`` or ``list`` + :param params: Additional arguments (optional). For a list of available + parameters, see `User authentication parameters + `_ + on Splunk Developer Portal. + :type params: ``dict`` + + :return: The new user. + :rtype: :class:`User` + + **Example**:: + + import splunklib.client as client + c = client.connect(...) 
+ users = c.users + boris = users.create("boris", "securepassword", roles="user") + hilda = users.create("hilda", "anotherpassword", roles=["user","power"]) + """ + if not isinstance(username, six.string_types): + raise ValueError("Invalid username: %s" % str(username)) + username = username.lower() + self.post(name=username, password=password, roles=roles, **params) + # splunkd doesn't return the user in the POST response body, + # so we have to make a second round trip to fetch it. + response = self.get(username) + entry = _load_atom(response, XNAME_ENTRY).entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + urllib.parse.unquote(state.links.alternate), + state=state) + return entity + + def delete(self, name): + """ Deletes the user and returns the resulting collection of users. + + :param name: The name of the user to delete. + :type name: ``string`` + + :return: + :rtype: :class:`Users` + """ + return Collection.delete(self, name.lower()) + + +class Role(Entity): + """This class represents a user role. + """ + def grant(self, *capabilities_to_grant): + """Grants additional capabilities to this role. + + :param capabilities_to_grant: Zero or more capabilities to grant this + role. For a list of capabilities, see + `Capabilities `_ + on Splunk Developer Portal. + :type capabilities_to_grant: ``string`` or ``list`` + :return: The :class:`Role`. + + **Example**:: + + service = client.connect(...) + role = service.roles['somerole'] + role.grant('change_own_password', 'search') + """ + possible_capabilities = self.service.capabilities + for capability in capabilities_to_grant: + if capability not in possible_capabilities: + raise NoSuchCapability(capability) + new_capabilities = self['capabilities'] + list(capabilities_to_grant) + self.post(capabilities=new_capabilities) + return self + + def revoke(self, *capabilities_to_revoke): + """Revokes zero or more capabilities from this role. + + :param capabilities_to_revoke: Zero or more capabilities to grant this + role. For a list of capabilities, see + `Capabilities `_ + on Splunk Developer Portal. + :type capabilities_to_revoke: ``string`` or ``list`` + + :return: The :class:`Role`. + + **Example**:: + + service = client.connect(...) + role = service.roles['somerole'] + role.revoke('change_own_password', 'search') + """ + possible_capabilities = self.service.capabilities + for capability in capabilities_to_revoke: + if capability not in possible_capabilities: + raise NoSuchCapability(capability) + old_capabilities = self['capabilities'] + new_capabilities = [] + for c in old_capabilities: + if c not in capabilities_to_revoke: + new_capabilities.append(c) + if new_capabilities == []: + new_capabilities = '' # Empty lists don't get passed in the body, so we have to force an empty argument. + self.post(capabilities=new_capabilities) + return self + + +class Roles(Collection): + """This class represents the collection of roles in the Splunk instance. + Retrieve this collection using :meth:`Service.roles`.""" + def __init__(self, service): + return Collection.__init__(self, service, PATH_ROLES, item=Role) + + def __getitem__(self, key): + return Collection.__getitem__(self, key.lower()) + + def __contains__(self, name): + return Collection.__contains__(self, name.lower()) + + def create(self, name, **params): + """Creates a new role. + + This function makes two roundtrips to the server, plus at most + two more if + the ``autologin`` field of :func:`connect` is set to ``True``. + + :param name: Name for the role. 
+ :type name: ``string`` + :param params: Additional arguments (optional). For a list of available + parameters, see `Roles parameters + `_ + on Splunk Developer Portal. + :type params: ``dict`` + + :return: The new role. + :rtype: :class:`Role` + + **Example**:: + + import splunklib.client as client + c = client.connect(...) + roles = c.roles + paltry = roles.create("paltry", imported_roles="user", defaultApp="search") + """ + if not isinstance(name, six.string_types): + raise ValueError("Invalid role name: %s" % str(name)) + name = name.lower() + self.post(name=name, **params) + # splunkd doesn't return the user in the POST response body, + # so we have to make a second round trip to fetch it. + response = self.get(name) + entry = _load_atom(response, XNAME_ENTRY).entry + state = _parse_atom_entry(entry) + entity = self.item( + self.service, + urllib.parse.unquote(state.links.alternate), + state=state) + return entity + + def delete(self, name): + """ Deletes the role and returns the resulting collection of roles. + + :param name: The name of the role to delete. + :type name: ``string`` + + :rtype: The :class:`Roles` + """ + return Collection.delete(self, name.lower()) + + +class Application(Entity): + """Represents a locally-installed Splunk app.""" + @property + def setupInfo(self): + """Returns the setup information for the app. + + :return: The setup information. + """ + return self.content.get('eai:setup', None) + + def package(self): + """ Creates a compressed package of the app for archiving.""" + return self._run_action("package") + + def updateInfo(self): + """Returns any update information that is available for the app.""" + return self._run_action("update") + +class KVStoreCollections(Collection): + def __init__(self, service): + Collection.__init__(self, service, 'storage/collections/config', item=KVStoreCollection) + + def create(self, name, indexes = {}, fields = {}, **kwargs): + """Creates a KV Store Collection. + + :param name: name of collection to create + :type name: ``string`` + :param indexes: dictionary of index definitions + :type indexes: ``dict`` + :param fields: dictionary of field definitions + :type fields: ``dict`` + :param kwargs: a dictionary of additional parameters specifying indexes and field definitions + :type kwargs: ``dict`` + + :return: Result of POST request + """ + for k, v in six.iteritems(indexes): + if isinstance(v, dict): + v = json.dumps(v) + kwargs['index.' + k] = v + for k, v in six.iteritems(fields): + kwargs['field.' + k] = v + return self.post(name=name, **kwargs) + +class KVStoreCollection(Entity): + @property + def data(self): + """Returns data object for this Collection. + + :rtype: :class:`KVStoreCollectionData` + """ + return KVStoreCollectionData(self) + + def update_index(self, name, value): + """Changes the definition of a KV Store index. + + :param name: name of index to change + :type name: ``string`` + :param value: new index definition + :type value: ``dict`` or ``string`` + + :return: Result of POST request + """ + kwargs = {} + kwargs['index.' + name] = value if isinstance(value, basestring) else json.dumps(value) + return self.post(**kwargs) + + def update_field(self, name, value): + """Changes the definition of a KV Store field. + + :param name: name of field to change + :type name: ``string`` + :param value: new field definition + :type value: ``string`` + + :return: Result of POST request + """ + kwargs = {} + kwargs['field.' 
+ name] = value + return self.post(**kwargs) + +class KVStoreCollectionData(object): + """This class represents the data endpoint for a KVStoreCollection. + + Retrieve using :meth:`KVStoreCollection.data` + """ + JSON_HEADER = [('Content-Type', 'application/json')] + + def __init__(self, collection): + self.service = collection.service + self.collection = collection + self.owner, self.app, self.sharing = collection._proper_namespace() + self.path = 'storage/collections/data/' + UrlEncoded(self.collection.name) + '/' + + def _get(self, url, **kwargs): + return self.service.get(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) + + def _post(self, url, **kwargs): + return self.service.post(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) + + def _delete(self, url, **kwargs): + return self.service.delete(self.path + url, owner=self.owner, app=self.app, sharing=self.sharing, **kwargs) + + def query(self, **query): + """ + Gets the results of query, with optional parameters sort, limit, skip, and fields. + + :param query: Optional parameters. Valid options are sort, limit, skip, and fields + :type query: ``dict`` + + :return: Array of documents retrieved by query. + :rtype: ``array`` + """ + return json.loads(self._get('', **query).body.read().decode('utf-8')) + + def query_by_id(self, id): + """ + Returns object with _id = id. + + :param id: Value for ID. If not a string will be coerced to string. + :type id: ``string`` + + :return: Document with id + :rtype: ``dict`` + """ + return json.loads(self._get(UrlEncoded(str(id))).body.read().decode('utf-8')) + + def insert(self, data): + """ + Inserts item into this collection. An _id field will be generated if not assigned in the data. + + :param data: Document to insert + :type data: ``string`` + + :return: _id of inserted object + :rtype: ``dict`` + """ + return json.loads(self._post('', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + + def delete(self, query=None): + """ + Deletes all data in collection if query is absent. Otherwise, deletes all data matched by query. + + :param query: Query to select documents to delete + :type query: ``string`` + + :return: Result of DELETE request + """ + return self._delete('', **({'query': query}) if query else {}) + + def delete_by_id(self, id): + """ + Deletes document that has _id = id. + + :param id: id of document to delete + :type id: ``string`` + + :return: Result of DELETE request + """ + return self._delete(UrlEncoded(str(id))) + + def update(self, id, data): + """ + Replaces document with _id = id with data. + + :param id: _id of document to update + :type id: ``string`` + :param data: the new document to insert + :type data: ``string`` + + :return: id of replaced document + :rtype: ``dict`` + """ + return json.loads(self._post(UrlEncoded(str(id)), headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + + def batch_find(self, *dbqueries): + """ + Returns array of results from queries dbqueries. 
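+
+        A minimal usage sketch (assuming an existing KV Store collection named
+        ``mycoll`` on a connected ``service``)::
+
+            data = service.kvstore['mycoll'].data
+            open_docs, closed_docs = data.batch_find(
+                {"query": {"status": "open"}},
+                {"query": {"status": "closed"}})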
+ + :param dbqueries: Array of individual queries as dictionaries + :type dbqueries: ``array`` of ``dict`` + + :return: Results of each query + :rtype: ``array`` of ``array`` + """ + if len(dbqueries) < 1: + raise Exception('Must have at least one query.') + + data = json.dumps(dbqueries) + + return json.loads(self._post('batch_find', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) + + def batch_save(self, *documents): + """ + Inserts or updates every document specified in documents. + + :param documents: Array of documents to save as dictionaries + :type documents: ``array`` of ``dict`` + + :return: Results of update operation as overall stats + :rtype: ``dict`` + """ + if len(documents) < 1: + raise Exception('Must have at least one document.') + + data = json.dumps(documents) + + return json.loads(self._post('batch_save', headers=KVStoreCollectionData.JSON_HEADER, body=data).body.read().decode('utf-8')) diff --git a/bin/splunklib/data.py b/bin/splunklib/data.py new file mode 100644 index 0000000..dedbb33 --- /dev/null +++ b/bin/splunklib/data.py @@ -0,0 +1,266 @@ +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The **splunklib.data** module reads the responses from splunkd in Atom Feed +format, which is the format used by most of the REST API. +""" + +from __future__ import absolute_import +import sys +from xml.etree.ElementTree import XML +from splunklib import six + +__all__ = ["load"] + +# LNAME refers to element names without namespaces; XNAME is the same +# name, but with an XML namespace. +LNAME_DICT = "dict" +LNAME_ITEM = "item" +LNAME_KEY = "key" +LNAME_LIST = "list" + +XNAMEF_REST = "{http://dev.splunk.com/ns/rest}%s" +XNAME_DICT = XNAMEF_REST % LNAME_DICT +XNAME_ITEM = XNAMEF_REST % LNAME_ITEM +XNAME_KEY = XNAMEF_REST % LNAME_KEY +XNAME_LIST = XNAMEF_REST % LNAME_LIST + +# Some responses don't use namespaces (eg: search/parse) so we look for +# both the extended and local versions of the following names. + +def isdict(name): + return name == XNAME_DICT or name == LNAME_DICT + +def isitem(name): + return name == XNAME_ITEM or name == LNAME_ITEM + +def iskey(name): + return name == XNAME_KEY or name == LNAME_KEY + +def islist(name): + return name == XNAME_LIST or name == LNAME_LIST + +def hasattrs(element): + return len(element.attrib) > 0 + +def localname(xname): + rcurly = xname.find('}') + return xname if rcurly == -1 else xname[rcurly+1:] + +def load(text, match=None): + """This function reads a string that contains the XML of an Atom Feed, then + returns the + data in a native Python structure (a ``dict`` or ``list``). If you also + provide a tag name or path to match, only the matching sub-elements are + loaded. + + :param text: The XML text to load. + :type text: ``string`` + :param match: A tag name or path to match (optional). 
+ :type match: ``string`` + """ + if text is None: return None + text = text.strip() + if len(text) == 0: return None + nametable = { + 'namespaces': [], + 'names': {} + } + + # Convert to unicode encoding in only python 2 for xml parser + if(sys.version_info < (3, 0, 0) and isinstance(text, unicode)): + text = text.encode('utf-8') + + root = XML(text) + items = [root] if match is None else root.findall(match) + count = len(items) + if count == 0: + return None + elif count == 1: + return load_root(items[0], nametable) + else: + return [load_root(item, nametable) for item in items] + +# Load the attributes of the given element. +def load_attrs(element): + if not hasattrs(element): return None + attrs = record() + for key, value in six.iteritems(element.attrib): + attrs[key] = value + return attrs + +# Parse a element and return a Python dict +def load_dict(element, nametable = None): + value = record() + children = list(element) + for child in children: + assert iskey(child.tag) + name = child.attrib["name"] + value[name] = load_value(child, nametable) + return value + +# Loads the given elements attrs & value into single merged dict. +def load_elem(element, nametable=None): + name = localname(element.tag) + attrs = load_attrs(element) + value = load_value(element, nametable) + if attrs is None: return name, value + if value is None: return name, attrs + # If value is simple, merge into attrs dict using special key + if isinstance(value, six.string_types): + attrs["$text"] = value + return name, attrs + # Both attrs & value are complex, so merge the two dicts, resolving collisions. + collision_keys = [] + for key, val in six.iteritems(attrs): + if key in value and key in collision_keys: + value[key].append(val) + elif key in value and key not in collision_keys: + value[key] = [value[key], val] + collision_keys.append(key) + else: + value[key] = val + return name, value + +# Parse a element and return a Python list +def load_list(element, nametable=None): + assert islist(element.tag) + value = [] + children = list(element) + for child in children: + assert isitem(child.tag) + value.append(load_value(child, nametable)) + return value + +# Load the given root element. +def load_root(element, nametable=None): + tag = element.tag + if isdict(tag): return load_dict(element, nametable) + if islist(tag): return load_list(element, nametable) + k, v = load_elem(element, nametable) + return Record.fromkv(k, v) + +# Load the children of the given element. +def load_value(element, nametable=None): + children = list(element) + count = len(children) + + # No children, assume a simple text value + if count == 0: + text = element.text + if text is None: + return None + text = text.strip() + if len(text) == 0: + return None + return text + + # Look for the special case of a single well-known structure + if count == 1: + child = children[0] + tag = child.tag + if isdict(tag): return load_dict(child, nametable) + if islist(tag): return load_list(child, nametable) + + value = record() + for child in children: + name, item = load_elem(child, nametable) + # If we have seen this name before, promote the value to a list + if name in value: + current = value[name] + if not isinstance(current, list): + value[name] = [current] + value[name].append(item) + else: + value[name] = item + + return value + +# A generic utility that enables "dot" access to dicts +class Record(dict): + """This generic utility class enables dot access to members of a Python + dictionary. 
+ + Any key that is also a valid Python identifier can be retrieved as a field. + So, for an instance of ``Record`` called ``r``, ``r.key`` is equivalent to + ``r['key']``. A key such as ``invalid-key`` or ``invalid.key`` cannot be + retrieved as a field, because ``-`` and ``.`` are not allowed in + identifiers. + + Keys of the form ``a.b.c`` are very natural to write in Python as fields. If + a group of keys shares a prefix ending in ``.``, you can retrieve keys as a + nested dictionary by calling only the prefix. For example, if ``r`` contains + keys ``'foo'``, ``'bar.baz'``, and ``'bar.qux'``, ``r.bar`` returns a record + with the keys ``baz`` and ``qux``. If a key contains multiple ``.``, each + one is placed into a nested dictionary, so you can write ``r.bar.qux`` or + ``r['bar.qux']`` interchangeably. + """ + sep = '.' + + def __call__(self, *args): + if len(args) == 0: return self + return Record((key, self[key]) for key in args) + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise AttributeError(name) + + def __delattr__(self, name): + del self[name] + + def __setattr__(self, name, value): + self[name] = value + + @staticmethod + def fromkv(k, v): + result = record() + result[k] = v + return result + + def __getitem__(self, key): + if key in self: + return dict.__getitem__(self, key) + key += self.sep + result = record() + for k,v in six.iteritems(self): + if not k.startswith(key): + continue + suffix = k[len(key):] + if '.' in suffix: + ks = suffix.split(self.sep) + z = result + for x in ks[:-1]: + if x not in z: + z[x] = record() + z = z[x] + z[ks[-1]] = v + else: + result[suffix] = v + if len(result) == 0: + raise KeyError("No key or prefix: %s" % key) + return result + + +def record(value=None): + """This function returns a :class:`Record` instance constructed with an + initial value that you provide. + + :param `value`: An initial record value. + :type `value`: ``dict`` + """ + if value is None: value = {} + return Record(value) + diff --git a/bin/splunklib/modularinput/__init__.py b/bin/splunklib/modularinput/__init__.py new file mode 100644 index 0000000..ace954a --- /dev/null +++ b/bin/splunklib/modularinput/__init__.py @@ -0,0 +1,12 @@ +"""The following imports allow these classes to be imported via +the splunklib.modularinput package like so: + +from splunklib.modularinput import * +""" +from .argument import Argument +from .event import Event +from .event_writer import EventWriter +from .input_definition import InputDefinition +from .scheme import Scheme +from .script import Script +from .validation_definition import ValidationDefinition diff --git a/bin/splunklib/modularinput/argument.py b/bin/splunklib/modularinput/argument.py new file mode 100644 index 0000000..04214d1 --- /dev/null +++ b/bin/splunklib/modularinput/argument.py @@ -0,0 +1,103 @@ +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from __future__ import absolute_import +try: + import xml.etree.ElementTree as ET +except ImportError: + import xml.etree.cElementTree as ET + +class Argument(object): + """Class representing an argument to a modular input kind. + + ``Argument`` is meant to be used with ``Scheme`` to generate an XML + definition of the modular input kind that Splunk understands. + + ``name`` is the only required parameter for the constructor. + + **Example with least parameters**:: + + arg1 = Argument(name="arg1") + + **Example with all parameters**:: + + arg2 = Argument( + name="arg2", + description="This is an argument with lots of parameters", + validation="is_pos_int('some_name')", + data_type=Argument.data_type_number, + required_on_edit=True, + required_on_create=True + ) + """ + + # Constant values, do not change. + # These should be used for setting the value of an Argument object's data_type field. + data_type_boolean = "BOOLEAN" + data_type_number = "NUMBER" + data_type_string = "STRING" + + def __init__(self, name, description=None, validation=None, + data_type=data_type_string, required_on_edit=False, required_on_create=False, title=None): + """ + :param name: ``string``, identifier for this argument in Splunk. + :param description: ``string``, human-readable description of the argument. + :param validation: ``string`` specifying how the argument should be validated, if using internal validation. + If using external validation, this will be ignored. + :param data_type: ``string``, data type of this field; use the class constants. + "data_type_boolean", "data_type_number", or "data_type_string". + :param required_on_edit: ``Boolean``, whether this arg is required when editing an existing modular input of this kind. + :param required_on_create: ``Boolean``, whether this arg is required when creating a modular input of this kind. + :param title: ``String``, a human-readable title for the argument. + """ + self.name = name + self.description = description + self.validation = validation + self.data_type = data_type + self.required_on_edit = required_on_edit + self.required_on_create = required_on_create + self.title = title + + def add_to_document(self, parent): + """Adds an ``Argument`` object to this ElementTree document. + + Adds an subelement to the parent element, typically + and sets up its subelements with their respective text. + + :param parent: An ``ET.Element`` to be the parent of a new subelement + :returns: An ``ET.Element`` object representing this argument. + """ + arg = ET.SubElement(parent, "arg") + arg.set("name", self.name) + + if self.title is not None: + ET.SubElement(arg, "title").text = self.title + + if self.description is not None: + ET.SubElement(arg, "description").text = self.description + + if self.validation is not None: + ET.SubElement(arg, "validation").text = self.validation + + # add all other subelements to this Argument, represented by (tag, text) + subelements = [ + ("data_type", self.data_type), + ("required_on_edit", self.required_on_edit), + ("required_on_create", self.required_on_create) + ] + + for name, value in subelements: + ET.SubElement(arg, name).text = str(value).lower() + + return arg \ No newline at end of file diff --git a/bin/splunklib/modularinput/event.py b/bin/splunklib/modularinput/event.py new file mode 100644 index 0000000..9cd6cf3 --- /dev/null +++ b/bin/splunklib/modularinput/event.py @@ -0,0 +1,114 @@ +# Copyright 2011-2015 Splunk, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import +from io import TextIOBase +from splunklib.six import ensure_text + +try: + import xml.etree.cElementTree as ET +except ImportError as ie: + import xml.etree.ElementTree as ET + +class Event(object): + """Represents an event or fragment of an event to be written by this modular input to Splunk. + + To write an input to a stream, call the ``write_to`` function, passing in a stream. + """ + def __init__(self, data=None, stanza=None, time=None, host=None, index=None, source=None, + sourcetype=None, done=True, unbroken=True): + """There are no required parameters for constructing an Event + + **Example with minimal configuration**:: + + my_event = Event( + data="This is a test of my new event.", + stanza="myStanzaName", + time="%.3f" % 1372187084.000 + ) + + **Example with full configuration**:: + + excellent_event = Event( + data="This is a test of my excellent event.", + stanza="excellenceOnly", + time="%.3f" % 1372274622.493, + host="localhost", + index="main", + source="Splunk", + sourcetype="misc", + done=True, + unbroken=True + ) + + :param data: ``string``, the event's text. + :param stanza: ``string``, name of the input this event should be sent to. + :param time: ``float``, time in seconds, including up to 3 decimal places to represent milliseconds. + :param host: ``string``, the event's host, ex: localhost. + :param index: ``string``, the index this event is specified to write to, or None if default index. + :param source: ``string``, the source of this event, or None to have Splunk guess. + :param sourcetype: ``string``, source type currently set on this event, or None to have Splunk guess. + :param done: ``boolean``, is this a complete ``Event``? False if an ``Event`` fragment. + :param unbroken: ``boolean``, Is this event completely encapsulated in this ``Event`` object? + """ + self.data = data + self.done = done + self.host = host + self.index = index + self.source = source + self.sourceType = sourcetype + self.stanza = stanza + self.time = time + self.unbroken = unbroken + + def write_to(self, stream): + """Write an XML representation of self, an ``Event`` object, to the given stream. + + The ``Event`` object will only be written if its data field is defined, + otherwise a ``ValueError`` is raised. + + :param stream: stream to write XML to. + """ + if self.data is None: + raise ValueError("Events must have at least the data field set to be written to XML.") + + event = ET.Element("event") + if self.stanza is not None: + event.set("stanza", self.stanza) + event.set("unbroken", str(int(self.unbroken))) + + # if a time isn't set, let Splunk guess by not creating a

[binary patch data for compiled .pyc files omitted]
diff --git a/bin/splunklib/searchcommands/__pycache__/reporting_command.cpython-37.pyc b/bin/splunklib/searchcommands/__pycache__/reporting_command.cpython-37.pyc new file mode 100755 index 0000000000000000000000000000000000000000..04b1e394c2e5b446751b05007fad0c5efe4334db GIT binary patch literal 8352 [binary data omitted]
diff --git a/bin/splunklib/searchcommands/__pycache__/streaming_command.cpython-37.pyc b/bin/splunklib/searchcommands/__pycache__/streaming_command.cpython-37.pyc new file mode 100755 index 0000000000000000000000000000000000000000..a0feb85147591b7359b7c26bd7011c07abad29e3 GIT binary patch literal 6182 [binary data omitted]
diff --git a/bin/splunklib/searchcommands/__pycache__/validators.cpython-37.pyc b/bin/splunklib/searchcommands/__pycache__/validators.cpython-37.pyc new file mode 100755 index 0000000000000000000000000000000000000000..37a2542260e7b03a46192851b5b76975f09117be GIT binary patch literal 12814 [binary data omitted]
zhJ6=@7iKHG6VJZ?(TxwUHQ&8nzy9u<@5CiO%m{I;KH1L3rG&o!!V*-%9vX!Gm5C@O zqeLHf>Y2SGdoyRkl#Egp@B}q^O5;R^rz=OhYkUE<$--7R&mU&a(@pmkX~^{Kg(*oU zGAGZLGtXKOFJMA>(j~1%Yze=Q%o|T)R4za9RdG4%$q}7F4I5@G0ckve8P3X$K9sto zMPTSBt-?Q#Nu5tg6~`G_Zl8>dNM?FA7>gDNHzmS24hfT|)IN&2!Hm1x?t7&Vum z#FMs+gSX^6U%7WSD%0+~rG0jM)z~q2EFckkWbKoo=4;yLcRog%Gq{X7N|@mvlNlD| zQCLMK1^k-le)ut!@K95KQsm0|{>Gxm!6@|Y0mV*QtJkgz*O z{n3Bp2iQWlp3r<3UnhWO8X85ot$KQQV;f&tNQajq-Xj=<#PT5O4 zG2!wP(HNeXI7!X{ok9*Z|1l=X!J4PQ`b(ssN(z!>4^D)Ql$HYmPC_#QUqWM&7OWMb zz7Ko|6UDm8x}rr=0I4d72+s+wF}7~|+hJUkWQ1ZHT~7o1JYr4RT_L-bxc&|({}-6` z9N(IveA~3Eg1r+0YNBHWS4f{TA)uAVrg8Z7!c1Ivmnrh$)tr#Z<6BobYDg?@Lvmew zOS3WPxHz@Vjsu+}&z_u*N>tV22JkmwZO))06(%@5 z(Tf8(84*iglIBs5WlsG5PUf7Gu=gd*p;Gq*pJOc$lBN&P_aqUc2e?kcq&`Rt!b6JV zIRd9>1Z6`K$du_c0wD*Gd}GY~C#`|z`Td-j!>T|)GC|j+5d8L0?Jx2AH}mpdvG7#{ zdt11&A9r>IS4b_-%ey-E0*Ef|Ba)S}uK_ZvFL%nx(O2b$graqxeVxe(esUne^lx_F zb>x*Fy}o*P@zbBJdY`S{{r@=QdTII@S8g$fPPx7W)zif2NgGGu`IMYhel zC}o0pcnWCy2B59B#T4Do5^#G)9ihsvz;K+jSK7y0RCf39iGOOWiV2>z$Lt;30d)dH z%p>7yTxq%ec7$kR=pzrtI9sJWA9ql*MZU{$T-quF$M*C3d3|{55c8%>9ydjDgtD9G zNbUq*%D|K4o(3z-3EauUlc#c5oGN#mT%`iWNHvQP1rMXv`Ffe^V(ed*ebc4ue~TY) z`q2b_41J!@@$;oLj#i(iWSMlbj2HchWWCDIz{}!@B{B5)BEyd)#7X!TTaUw!_j8P^ zVlmTQD6O&Fr^~41_bGMxOb^kCPQi~!xOIFga@+wPqgu%0MY-a!%Y4$FbIX3trRNoUFzYR3i)w;C`#-(mVG?Dgiqs_ zx;=c(EyKImX!nFXvt?{6uCO2^i_6&)YI|mQ=i4mEaQP9V{8UALY#`__ + + """ + def __init__(self, o=None, **kwargs): + # + # The o argument enables the configuration decorator to be used with or without parentheses. For example, it + # enables you to write code that looks like this: + # + # @Configuration + # class Foo(SearchCommand): + # ... + # + # @Configuration() + # class Bar(SearchCommand): + # ... + # + # Without the o argument, the Python compiler will complain about the first form. With the o argument, both + # forms work. The first form provides a value for o: Foo. The second form does does not provide a value for o. + # The class or method decorated is not passed to the constructor. A value of None is passed instead. + # + self.settings = kwargs + + def __call__(self, o): + + if isfunction(o): + # We must wait to finalize configuration as the class containing this function is under construction + # at the time this call to decorate a member function. This will be handled in the call to + # o.ConfigurationSettings.fix_up(o) in the elif clause of this code block. + o._settings = self.settings + elif isclass(o): + + # Set command name + + name = o.__name__ + if name.endswith('Command'): + name = name[:-len('Command')] + o.name = six.text_type(name.lower()) + + # Construct ConfigurationSettings instance for the command class + + o.ConfigurationSettings = ConfigurationSettingsType( + module=o.__module__ + '.' + o.__name__, + name='ConfigurationSettings', + bases=(o.ConfigurationSettings,)) + + ConfigurationSetting.fix_up(o.ConfigurationSettings, self.settings) + o.ConfigurationSettings.fix_up(o) + Option.fix_up(o) + else: + raise TypeError('Incorrect usage: Configuration decorator applied to {0}'.format(type(o), o.__name__)) + + return o + + +class ConfigurationSetting(property): + """ Generates a :class:`property` representing the named configuration setting + + This is a convenience function designed to reduce the amount of boiler-plate code you must write; most notably for + property setters. + + :param name: Configuration setting name. + :type name: str or unicode + + :param doc: A documentation string. 
+ :type doc: bytes, unicode or NoneType + + :param readonly: If true, specifies that the configuration setting is fixed. + :type name: bool or NoneType + + :param value: Configuration setting value. + + :return: A :class:`property` instance representing the configuration setting. + :rtype: property + + """ + def __init__(self, fget=None, fset=None, fdel=None, doc=None, name=None, readonly=None, value=None): + property.__init__(self, fget=fget, fset=fset, fdel=fdel, doc=doc) + self._readonly = readonly + self._value = value + self._name = name + + def __call__(self, function): + return self.getter(function) + + def deleter(self, function): + return self._copy_extra_attributes(property.deleter(self, function)) + + def getter(self, function): + return self._copy_extra_attributes(property.getter(self, function)) + + def setter(self, function): + return self._copy_extra_attributes(property.setter(self, function)) + + @staticmethod + def fix_up(cls, values): + + is_configuration_setting = lambda attribute: isinstance(attribute, ConfigurationSetting) + definitions = getmembers(cls, is_configuration_setting) + i = 0 + + for name, setting in definitions: + + if setting._name is None: + setting._name = name = six.text_type(name) + else: + name = setting._name + + validate, specification = setting._get_specification() + backing_field_name = '_' + name + + if setting.fget is None and setting.fset is None and setting.fdel is None: + + value = setting._value + + if setting._readonly or value is not None: + validate(specification, name, value) + + def fget(bfn, value): + return lambda this: getattr(this, bfn, value) + + setting = setting.getter(fget(backing_field_name, value)) + + if not setting._readonly: + + def fset(bfn, validate, specification, name): + return lambda this, value: setattr(this, bfn, validate(specification, name, value)) + + setting = setting.setter(fset(backing_field_name, validate, specification, name)) + + setattr(cls, name, setting) + + def is_supported_by_protocol(supporting_protocols): + + def is_supported_by_protocol(version): + return version in supporting_protocols + + return is_supported_by_protocol + + del setting._name, setting._value, setting._readonly + + setting.is_supported_by_protocol = is_supported_by_protocol(specification.supporting_protocols) + setting.supporting_protocols = specification.supporting_protocols + setting.backing_field_name = backing_field_name + definitions[i] = setting + setting.name = name + + i += 1 + + try: + value = values[name] + except KeyError: + continue + + if setting.fset is None: + raise ValueError('The value of configuration setting {} is fixed'.format(name)) + + setattr(cls, backing_field_name, validate(specification, name, value)) + del values[name] + + if len(values) > 0: + settings = sorted(list(six.iteritems(values))) + settings = imap(lambda n_v: '{}={}'.format(n_v[0], repr(n_v[1])), settings) + raise AttributeError('Inapplicable configuration settings: ' + ', '.join(settings)) + + cls.configuration_setting_definitions = definitions + + def _copy_extra_attributes(self, other): + other._readonly = self._readonly + other._value = self._value + other._name = self._name + return other + + def _get_specification(self): + + name = self._name + + try: + specification = ConfigurationSettingsType.specification_matrix[name] + except KeyError: + raise AttributeError('Unknown configuration setting: {}={}'.format(name, repr(self._value))) + + return ConfigurationSettingsType.validate_configuration_setting, specification + + +class 
Option(property): + """ Represents a search command option. + + Required options must be specified on the search command line. + + **Example:** + + Short form (recommended). When you are satisfied with built-in or custom validation behaviors. + + .. code-block:: python + :linenos: + + from splunklib.searchcommands.decorators import Option + from splunklib.searchcommands.validators import Fieldname + + total = Option( + doc=''' **Syntax:** **total=**** + **Description:** Name of the field that will hold the computed + sum''', + require=True, validate=Fieldname()) + + **Example:** + + Long form. Useful when you wish to manage the option value and its deleter/getter/setter side-effects yourself. You + must provide a getter and a setter. If your :code:`Option` requires `destruction `_ you must + also provide a deleter. You must be prepared to accept a value of :const:`None` which indicates that your + :code:`Option` is unset. + + .. code-block:: python + :linenos: + + from splunklib.searchcommands import Option + + @Option() + def logging_configuration(self): + \""" **Syntax:** logging_configuration= + **Description:** Loads an alternative logging configuration file for a command invocation. The logging + configuration file must be in Python ConfigParser-format. The ** name and all path names specified in + configuration are relative to the app root directory. + + \""" + return self._logging_configuration + + @logging_configuration.setter + def logging_configuration(self, value): + if value is not None + logging.configure(value) + self._logging_configuration = value + + def __init__(self) + self._logging_configuration = None + + """ + def __init__(self, fget=None, fset=None, fdel=None, doc=None, name=None, default=None, require=None, validate=None): + property.__init__(self, fget, fset, fdel, doc) + self.name = name + self.default = default + self.validate = validate + self.require = bool(require) + + def __call__(self, function): + return self.getter(function) + + # region Methods + + def deleter(self, function): + return self._copy_extra_attributes(property.deleter(self, function)) + + def getter(self, function): + return self._copy_extra_attributes(property.getter(self, function)) + + def setter(self, function): + return self._copy_extra_attributes(property.setter(self, function)) + + @classmethod + def fix_up(cls, command_class): + + is_option = lambda attribute: isinstance(attribute, Option) + definitions = getmembers(command_class, is_option) + validate_option_name = OptionName() + i = 0 + + for name, option in definitions: + + if option.name is None: + option.name = name # no validation required + else: + validate_option_name(option.name) + + if option.fget is None and option.fset is None and option.fdel is None: + backing_field_name = '_' + name + + def fget(bfn): + return lambda this: getattr(this, bfn, None) + + option = option.getter(fget(backing_field_name)) + + def fset(bfn, validate): + if validate is None: + return lambda this, value: setattr(this, bfn, value) + return lambda this, value: setattr(this, bfn, validate(value)) + + option = option.setter(fset(backing_field_name, option.validate)) + setattr(command_class, name, option) + + elif option.validate is not None: + + def fset(function, validate): + return lambda this, value: function(this, validate(value)) + + option = option.setter(fset(option.fset, option.validate)) + setattr(command_class, name, option) + + definitions[i] = name, option + i += 1 + + command_class.option_definitions = definitions + + def 
_copy_extra_attributes(self, other): + other.name = self.name + other.default = self.default + other.require = self.require + other.validate = self.validate + return other + + # endregion + + # region Types + + class Item(object): + """ Presents an instance/class view over a search command `Option`. + + This class is used by SearchCommand.process to parse and report on option values. + + """ + def __init__(self, command, option): + self._command = command + self._option = option + self._is_set = False + validator = self.validator + self._format = six.text_type if validator is None else validator.format + + def __repr__(self): + return '(' + repr(self.name) + ', ' + repr(self._format(self.value)) + ')' + + def __str__(self): + value = self.value + value = 'None' if value is None else json_encode_string(self._format(value)) + return self.name + '=' + value + + # region Properties + + @property + def is_required(self): + return bool(self._option.require) + + @property + def is_set(self): + """ Indicates whether an option value was provided as argument. + + """ + return self._is_set + + @property + def name(self): + return self._option.name + + @property + def validator(self): + return self._option.validate + + @property + def value(self): + return self._option.__get__(self._command) + + @value.setter + def value(self, value): + self._option.__set__(self._command, value) + self._is_set = True + + # endregion + + # region Methods + + def reset(self): + self._option.__set__(self._command, self._option.default) + self._is_set = False + + pass + # endregion + + class View(OrderedDict): + """ Presents an ordered dictionary view of the set of :class:`Option` arguments to a search command. + + This class is used by SearchCommand.process to parse and report on option values. + + """ + def __init__(self, command): + definitions = type(command).option_definitions + item_class = Option.Item + OrderedDict.__init__(self, ((option.name, item_class(command, option)) for (name, option) in definitions)) + + def __repr__(self): + text = 'Option.View([' + ','.join(imap(lambda item: repr(item), six.itervalues(self))) + '])' + return text + + def __str__(self): + text = ' '.join([str(item) for item in six.itervalues(self) if item.is_set]) + return text + + # region Methods + + def get_missing(self): + missing = [item.name for item in six.itervalues(self) if item.is_required and not item.is_set] + return missing if len(missing) > 0 else None + + def reset(self): + for value in six.itervalues(self): + value.reset() + + pass + # endregion + + pass + # endregion + + +__all__ = ['Configuration', 'Option'] diff --git a/bin/splunklib/searchcommands/environment.py b/bin/splunklib/searchcommands/environment.py new file mode 100644 index 0000000..e92018f --- /dev/null +++ b/bin/splunklib/searchcommands/environment.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# +# Copyright © 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
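+
+# A minimal sketch of how the Option and Configuration decorators defined in decorators.py above
+# are typically used. The command, its option, and the numeric input field 'count' are hypothetical
+# and are not part of this app:
+#
+#     import sys
+#     from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
+#
+#     @Configuration()
+#     class RunningTotalCommand(StreamingCommand):
+#         total = Option(doc='Name of the field that receives the running total',
+#                        require=True, validate=validators.Fieldname())
+#
+#         def stream(self, records):
+#             running_total = 0.0
+#             for record in records:
+#                 running_total += float(record.get('count', 0))  # assumes an input field named 'count'
+#                 record[self.total] = running_total
+#                 yield record
+#
+#     dispatch(RunningTotalCommand, sys.argv, sys.stdin, sys.stdout, __name__)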
+ +from __future__ import absolute_import, division, print_function, unicode_literals + +from logging import getLogger, root, StreamHandler +from logging.config import fileConfig +from os import chdir, environ, path +from splunklib.six.moves import getcwd + +import sys + + +def configure_logging(logger_name, filename=None): + """ Configure logging and return the named logger and the location of the logging configuration file loaded. + + This function expects a Splunk app directory structure:: + + + bin + ... + default + ... + local + ... + + This function looks for a logging configuration file at each of these locations, loading the first, if any, + logging configuration file that it finds:: + + local/{name}.logging.conf + default/{name}.logging.conf + local/logging.conf + default/logging.conf + + The current working directory is set to ** before the logging configuration file is loaded. Hence, paths + in the logging configuration file are relative to **. The current directory is reset before return. + + You may short circuit the search for a logging configuration file by providing an alternative file location in + `path`. Logging configuration files must be in `ConfigParser format`_. + + #Arguments: + + :param logger_name: Logger name + :type logger_name: bytes, unicode + + :param filename: Location of an alternative logging configuration file or `None`. + :type filename: bytes, unicode or NoneType + + :returns: The named logger and the location of the logging configuration file loaded. + :rtype: tuple + + .. _ConfigParser format: https://docs.python.org/2/library/logging.config.html#configuration-file-format + + """ + if filename is None: + if logger_name is None: + probing_paths = [path.join('local', 'logging.conf'), path.join('default', 'logging.conf')] + else: + probing_paths = [ + path.join('local', logger_name + '.logging.conf'), + path.join('default', logger_name + '.logging.conf'), + path.join('local', 'logging.conf'), + path.join('default', 'logging.conf')] + for relative_path in probing_paths: + configuration_file = path.join(app_root, relative_path) + if path.exists(configuration_file): + filename = configuration_file + break + elif not path.isabs(filename): + found = False + for conf in 'local', 'default': + configuration_file = path.join(app_root, conf, filename) + if path.exists(configuration_file): + filename = configuration_file + found = True + break + if not found: + raise ValueError('Logging configuration file "{}" not found in local or default directory'.format(filename)) + elif not path.exists(filename): + raise ValueError('Logging configuration file "{}" not found'.format(filename)) + + if filename is not None: + global _current_logging_configuration_file + filename = path.realpath(filename) + + if filename != _current_logging_configuration_file: + working_directory = getcwd() + chdir(app_root) + try: + fileConfig(filename, {'SPLUNK_HOME': splunk_home}) + finally: + chdir(working_directory) + _current_logging_configuration_file = filename + + if len(root.handlers) == 0: + root.addHandler(StreamHandler()) + + return None if logger_name is None else getLogger(logger_name), filename + + +_current_logging_configuration_file = None + +splunk_home = path.abspath(path.join(getcwd(), environ.get('SPLUNK_HOME', ''))) +app_file = getattr(sys.modules['__main__'], '__file__', sys.executable) +app_root = path.dirname(path.abspath(path.dirname(app_file))) + +splunklib_logger, logging_configuration = configure_logging('splunklib') + + +__all__ = ['app_file', 'app_root', 
'logging_configuration', 'splunk_home', 'splunklib_logger'] diff --git a/bin/splunklib/searchcommands/eventing_command.py b/bin/splunklib/searchcommands/eventing_command.py new file mode 100644 index 0000000..27dc13a --- /dev/null +++ b/bin/splunklib/searchcommands/eventing_command.py @@ -0,0 +1,149 @@ +# coding=utf-8 +# +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +from splunklib import six +from splunklib.six.moves import map as imap + +from .decorators import ConfigurationSetting +from .search_command import SearchCommand + + +class EventingCommand(SearchCommand): + """ Applies a transformation to search results as they travel through the events pipeline. + + Eventing commands typically filter, group, order, and/or or augment event records. Examples of eventing commands + from Splunk's built-in command set include sort_, dedup_, and cluster_. Each execution of an eventing command + should produce a set of event records that is independently usable by downstream processors. + + .. _sort: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Sort + .. _dedup: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Dedup + .. _cluster: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Cluster + + EventingCommand configuration + ============================== + + You can configure your command for operation under Search Command Protocol (SCP) version 1 or 2. SCP 2 requires + Splunk 6.3 or later. + + """ + # region Methods + + def transform(self, records): + """ Generator function that processes and yields event records to the Splunk events pipeline. + + You must override this method. + + """ + raise NotImplementedError('EventingCommand.transform(self, records)') + + def _execute(self, ifile, process): + SearchCommand._execute(self, ifile, self.transform) + + # endregion + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + """ Represents the configuration settings that apply to a :class:`EventingCommand`. + + """ + # region SCP v1/v2 properties + + required_fields = ConfigurationSetting(doc=''' + List of required fields for this search which back-propagates to the generating search. + + Setting this value enables selected fields mode under SCP 2. Under SCP 1 you must also specify + :code:`clear_required_fields=True` to enable selected fields mode. To explicitly select all fields, + specify a value of :const:`['*']`. No error is generated if a specified field is missing. + + Default: :const:`None`, which implicitly selects all fields. + + ''') + + # endregion + + # region SCP v1 properties + + clear_required_fields = ConfigurationSetting(doc=''' + :const:`True`, if required_fields represent the *only* fields required. + + If :const:`False`, required_fields are additive to any fields that may be required by subsequent commands. + In most cases, :const:`False` is appropriate for eventing commands. 
+ + Default: :const:`False` + + ''') + + retainsevents = ConfigurationSetting(readonly=True, value=True, doc=''' + :const:`True`, if the command retains events the way the sort/dedup/cluster commands do. + + If :const:`False`, the command transforms events the way the stats command does. + + Fixed: :const:`True` + + ''') + + # endregion + + # region SCP v2 properties + + maxinputs = ConfigurationSetting(doc=''' + Specifies the maximum number of events that can be passed to the command for each invocation. + + This limit cannot exceed the value of `maxresultrows` as defined in limits.conf_. Under SCP 1 you must + specify this value in commands.conf_. + + Default: The value of `maxresultrows`. + + Supported by: SCP 2 + + .. _limits.conf: http://docs.splunk.com/Documentation/Splunk/latest/admin/Limitsconf + + ''') + + type = ConfigurationSetting(readonly=True, value='events', doc=''' + Command type + + Fixed: :const:`'events'`. + + Supported by: SCP 2 + + ''') + + # endregion + + # region Methods + + @classmethod + def fix_up(cls, command): + """ Verifies :code:`command` class structure. + + """ + if command.transform == EventingCommand.transform: + raise AttributeError('No EventingCommand.transform override') + SearchCommand.ConfigurationSettings.fix_up(command) + + # TODO: Stop looking like a dictionary because we don't obey the semantics + # N.B.: Does not use Python 2 dict copy semantics + def iteritems(self): + iteritems = SearchCommand.ConfigurationSettings.iteritems(self) + return imap(lambda name_value: (name_value[0], 'events' if name_value[0] == 'type' else name_value[1]), iteritems) + + # N.B.: Does not use Python 3 dict view semantics + if not six.PY2: + items = iteritems + + # endregion diff --git a/bin/splunklib/searchcommands/external_search_command.py b/bin/splunklib/searchcommands/external_search_command.py new file mode 100644 index 0000000..c230624 --- /dev/null +++ b/bin/splunklib/searchcommands/external_search_command.py @@ -0,0 +1,228 @@ +# coding=utf-8 +# +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +from logging import getLogger +import os +import sys +import traceback +from splunklib import six + +if sys.platform == 'win32': + from signal import signal, CTRL_BREAK_EVENT, SIGBREAK, SIGINT, SIGTERM + from subprocess import Popen + import atexit + +from . 
import splunklib_logger as logger + +# P1 [ ] TODO: Add ExternalSearchCommand class documentation + + +class ExternalSearchCommand(object): + """ + """ + def __init__(self, path, argv=None, environ=None): + + if not isinstance(path, (bytes, six.text_type)): + raise ValueError('Expected a string value for path, not {}'.format(repr(path))) + + self._logger = getLogger(self.__class__.__name__) + self._path = six.text_type(path) + self._argv = None + self._environ = None + + self.argv = argv + self.environ = environ + + # region Properties + + @property + def argv(self): + return getattr(self, '_argv') + + @argv.setter + def argv(self, value): + if not (value is None or isinstance(value, (list, tuple))): + raise ValueError('Expected a list, tuple or value of None for argv, not {}'.format(repr(value))) + self._argv = value + + @property + def environ(self): + return getattr(self, '_environ') + + @environ.setter + def environ(self, value): + if not (value is None or isinstance(value, dict)): + raise ValueError('Expected a dictionary value for environ, not {}'.format(repr(value))) + self._environ = value + + @property + def logger(self): + return self._logger + + @property + def path(self): + return self._path + + # endregion + + # region Methods + + def execute(self): + # noinspection PyBroadException + try: + if self._argv is None: + self._argv = os.path.splitext(os.path.basename(self._path))[0] + self._execute(self._path, self._argv, self._environ) + except: + error_type, error, tb = sys.exc_info() + message = 'Command execution failed: ' + six.text_type(error) + self._logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) + sys.exit(1) + + if sys.platform == 'win32': + + @staticmethod + def _execute(path, argv=None, environ=None): + """ Executes an external search command. + + :param path: Path to the external search command. + :type path: unicode + + :param argv: Argument list. + :type argv: list or tuple + The arguments to the child process should start with the name of the command being run, but this is not + enforced. A value of :const:`None` specifies that the base name of path name :param:`path` should be used. + + :param environ: A mapping which is used to define the environment variables for the new process. + :type environ: dict or None. + This mapping is used instead of the current process’s environment. A value of :const:`None` specifies that + the :data:`os.environ` mapping should be used. 
+ + :return: None + + """ + search_path = os.getenv('PATH') if environ is None else environ.get('PATH') + found = ExternalSearchCommand._search_path(path, search_path) + + if found is None: + raise ValueError('Cannot find command on path: {}'.format(path)) + + path = found + logger.debug('starting command="%s", arguments=%s', path, argv) + + def terminate(signal_number, frame): + sys.exit('External search command is terminating on receipt of signal={}.'.format(signal_number)) + + def terminate_child(): + if p.pid is not None and p.returncode is None: + logger.debug('terminating command="%s", arguments=%d, pid=%d', path, argv, p.pid) + os.kill(p.pid, CTRL_BREAK_EVENT) + + p = Popen(argv, executable=path, env=environ, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr) + atexit.register(terminate_child) + signal(SIGBREAK, terminate) + signal(SIGINT, terminate) + signal(SIGTERM, terminate) + + logger.debug('started command="%s", arguments=%s, pid=%d', path, argv, p.pid) + p.wait() + + logger.debug('finished command="%s", arguments=%s, pid=%d, returncode=%d', path, argv, p.pid, p.returncode) + + if p.returncode != 0: + sys.exit(p.returncode) + + @staticmethod + def _search_path(executable, paths): + """ Locates an executable program file. + + :param executable: The name of the executable program to locate. + :type executable: unicode + + :param paths: A list of one or more directory paths where executable programs are located. + :type paths: unicode + + :return: + :rtype: Path to the executable program located or :const:`None`. + + """ + directory, filename = os.path.split(executable) + extension = os.path.splitext(filename)[1].upper() + executable_extensions = ExternalSearchCommand._executable_extensions + + if directory: + if len(extension) and extension in executable_extensions: + return None + for extension in executable_extensions: + path = executable + extension + if os.path.isfile(path): + return path + return None + + if not paths: + return None + + directories = [directory for directory in paths.split(';') if len(directory)] + + if len(directories) == 0: + return None + + if len(extension) and extension in executable_extensions: + for directory in directories: + path = os.path.join(directory, executable) + if os.path.isfile(path): + return path + return None + + for directory in directories: + path_without_extension = os.path.join(directory, executable) + for extension in executable_extensions: + path = path_without_extension + extension + if os.path.isfile(path): + return path + + return None + + _executable_extensions = ('.COM', '.EXE') + else: + @staticmethod + def _execute(path, argv, environ): + if environ is None: + os.execvp(path, argv) + else: + os.execvpe(path, argv, environ) + return + + # endregion + + +def execute(path, argv=None, environ=None, command_class=ExternalSearchCommand): + """ + :param path: + :type path: basestring + :param argv: + :type: argv: list, tuple, or None + :param environ: + :type environ: dict + :param command_class: External search command class to instantiate and execute. + :type command_class: type + :return: + :rtype: None + """ + assert issubclass(command_class, ExternalSearchCommand) + command_class(path, argv, environ).execute() diff --git a/bin/splunklib/searchcommands/generating_command.py b/bin/splunklib/searchcommands/generating_command.py new file mode 100644 index 0000000..724d45d --- /dev/null +++ b/bin/splunklib/searchcommands/generating_command.py @@ -0,0 +1,350 @@ +# coding=utf-8 +# +# Copyright © 2011-2015 Splunk, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +from .decorators import ConfigurationSetting +from .search_command import SearchCommand + +from splunklib import six +from splunklib.six.moves import map as imap, filter as ifilter + +# P1 [O] TODO: Discuss generates_timeorder in the class-level documentation for GeneratingCommand + + +class GeneratingCommand(SearchCommand): + """ Generates events based on command arguments. + + Generating commands receive no input and must be the first command on a pipeline. There are three pipelines: + streams, events, and reports. The streams pipeline generates or processes time-ordered event records on an + indexer or search head. + + Streaming commands filter, modify, or augment event records and can be applied to subsets of index data in a + parallel manner. An example of a streaming command from Splunk's built-in command set is rex_ which extracts and + adds fields to event records at search time. Records that pass through the streams pipeline move on to the events + pipeline. + + The events pipeline generates or processes records on a search head. Eventing commands typically filter, group, + order, or augment event records. Examples of eventing commands from Splunk's built-in command set include sort_, + dedup_, and cluster_. Each execution of an eventing command should produce a set of event records that is + independently usable by downstream processors. Records that pass through the events pipeline move on to the reports + pipeline. + + The reports pipeline also runs on a search head, but yields data structures for presentation, not event records. + Examples of streaming from Splunk's built-in command set include chart_, stats_, and contingency_. + + GeneratingCommand configuration + =============================== + + Configure your generating command based on the pipeline that it targets. How you configure your command depends on + the Search Command Protocol (SCP) version. + + +----------+-------------------------------------+--------------------------------------------+ + | Pipeline | SCP 1 | SCP 2 | + +==========+=====================================+============================================+ + | streams | streaming=True[,local=[True|False]] | type='streaming'[,distributed=[true|false] | + +----------+-------------------------------------+--------------------------------------------+ + | events | retainsevents=True, streaming=False | type='events' | + +----------+-------------------------------------+--------------------------------------------+ + | reports | streaming=False | type='reporting' | + +----------+-------------------------------------+--------------------------------------------+ + + Only streaming commands may be distributed to indexers. By default generating commands are configured to run + locally in the streams pipeline and will run under either SCP 1 or SCP 2. + + .. 
code-block:: python + + @Configuration() + class StreamingGeneratingCommand(GeneratingCommand) + ... + + How you configure your command to run on a different pipeline or in a distributed fashion depends on what SCP + protocol versions you wish to support. You must be sure to configure your command consistently for each protocol, + if you wish to support both protocol versions correctly. + + .. _chart: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Chart + .. _cluster: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Cluster + .. _contingency: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Contingency + .. _dedup: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Dedup + .. _rex: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Rex + .. _sort: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Sort + .. _stats: http://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Stats + + Distributed Generating command + ============================== + + Commands configured like this will run as the first command on search heads and/or indexers on the streams pipeline. + + +----------+---------------------------------------------------+---------------------------------------------------+ + | Pipeline | SCP 1 | SCP 2 | + +==========+===================================================+===================================================+ + | streams | 1. Add this line to your command's stanza in | 1. Add this configuration setting to your code: | + | | | | + | | default/commands.conf:: | .. code-block:: python | + | | | | + | | local = false | @Configuration(distributed=True) | + | | | class SomeCommand(GeneratingCommand) | + | | | ... | + | | 2. Restart splunk | | + | | | 2. You are good to go; no need to restart Splunk | + +----------+---------------------------------------------------+---------------------------------------------------+ + + Eventing Generating command + =========================== + + Generating commands configured like this will run as the first command on a search head on the events pipeline. + + +----------+---------------------------------------------------+---------------------------------------------------+ + | Pipeline | SCP 1 | SCP 2 | + +==========+===================================================+===================================================+ + | events | You have a choice. Add these configuration | Add this configuration setting to your command | + | | settings to your command class: | setting to your command class: | + | | | | + | | .. code-block:: python | .. code-block:: python | + | | | | + | | @Configuration( | @Configuration(type='events') | + | | retainsevents=True, streaming=False) | class SomeCommand(GeneratingCommand) | + | | class SomeCommand(GeneratingCommand) | ... | + | | ... | | + | | | | + | | Or add these lines to default/commands.conf: | | + | | | | + | | .. code-block:: text | | + | | | | + | | retainsevents = true | | + | | streaming = false | | + +----------+---------------------------------------------------+---------------------------------------------------+ + + Configure your command class like this, if you wish to support both protocols: + + .. code-block:: python + + @Configuration(type='events', retainsevents=True, streaming=False) + class SomeCommand(GeneratingCommand) + ... + + You might also consider adding these lines to commands.conf instead of adding them to your command class: + + .. 
code-block:: python + + retainsevents = false + streaming = false + + Reporting Generating command + ============================ + + Commands configured like this will run as the first command on a search head on the reports pipeline. + + +----------+---------------------------------------------------+---------------------------------------------------+ + | Pipeline | SCP 1 | SCP 2 | + +==========+===================================================+===================================================+ + | events | You have a choice. Add these configuration | Add this configuration setting to your command | + | | settings to your command class: | setting to your command class: | + | | | | + | | .. code-block:: python | .. code-block:: python | + | | | | + | | @Configuration(retainsevents=False) | @Configuration(type='reporting') | + | | class SomeCommand(GeneratingCommand) | class SomeCommand(GeneratingCommand) | + | | ... | ... | + | | | | + | | Or add this lines to default/commands.conf: | | + | | | | + | | .. code-block:: text | | + | | | | + | | retainsevents = false | | + | | streaming = false | | + +----------+---------------------------------------------------+---------------------------------------------------+ + + Configure your command class like this, if you wish to support both protocols: + + .. code-block:: python + + @Configuration(type='reporting', streaming=False) + class SomeCommand(GeneratingCommand) + ... + + You might also consider adding these lines to commands.conf instead of adding them to your command class: + + .. code-block:: text + + retainsevents = false + streaming = false + + """ + # region Methods + + def generate(self): + """ A generator that yields records to the Splunk processing pipeline + + You must override this method. + + """ + raise NotImplementedError('GeneratingCommand.generate(self)') + + def _execute(self, ifile, process): + """ Execution loop + + :param ifile: Input file object. Unused. + :type ifile: file + + :return: `None`. + + """ + if self._protocol_version == 2: + self._execute_v2(ifile, self.generate()) + else: + assert self._protocol_version == 1 + self._record_writer.write_records(self.generate()) + self.finish() + + def _execute_chunk_v2(self, process, chunk): + count = 0 + for row in process: + self._record_writer.write_record(row) + count += 1 + if count == self._record_writer._maxresultrows: + self._finished = False + return + self._finished = True + + # endregion + + # region Types + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + """ Represents the configuration settings for a :code:`GeneratingCommand` class. + + """ + # region SCP v1/v2 Properties + + generating = ConfigurationSetting(readonly=True, value=True, doc=''' + Tells Splunk that this command generates events, but does not process inputs. + + Generating commands must appear at the front of the search pipeline identified by :meth:`type`. + + Fixed: :const:`True` + + Supported by: SCP 1, SCP 2 + + ''') + + # endregion + + # region SCP v1 Properties + + generates_timeorder = ConfigurationSetting(doc=''' + :const:`True`, if the command generates new events. + + Default: :const:`False` + + Supported by: SCP 1 + + ''') + + local = ConfigurationSetting(doc=''' + :const:`True`, if the command should run locally on the search head. 
+ + Default: :const:`False` + + Supported by: SCP 1 + + ''') + + retainsevents = ConfigurationSetting(doc=''' + :const:`True`, if the command retains events the way the sort, dedup, and cluster commands do, or whether it + transforms them the way the stats command does. + + Default: :const:`False` + + Supported by: SCP 1 + + ''') + + streaming = ConfigurationSetting(doc=''' + :const:`True`, if the command is streamable. + + Default: :const:`True` + + Supported by: SCP 1 + + ''') + + # endregion + + # region SCP v2 Properties + + distributed = ConfigurationSetting(value=False, doc=''' + True, if this command should be distributed to indexers. + + This value is ignored unless :meth:`type` is equal to :const:`streaming`. It is only this command type that + may be distributed. + + Default: :const:`False` + + Supported by: SCP 2 + + ''') + + type = ConfigurationSetting(value='streaming', doc=''' + A command type name. + + ==================== ====================================================================================== + Value Description + -------------------- -------------------------------------------------------------------------------------- + :const:`'events'` Runs as the first command in the Splunk events pipeline. Cannot be distributed. + :const:`'reporting'` Runs as the first command in the Splunk reports pipeline. Cannot be distributed. + :const:`'streaming'` Runs as the first command in the Splunk streams pipeline. May be distributed. + ==================== ====================================================================================== + + Default: :const:`'streaming'` + + Supported by: SCP 2 + + ''') + + # endregion + + # region Methods + + @classmethod + def fix_up(cls, command): + """ Verifies :code:`command` class structure. + + """ + if command.generate == GeneratingCommand.generate: + raise AttributeError('No GeneratingCommand.generate override') + + # TODO: Stop looking like a dictionary because we don't obey the semantics + # N.B.: Does not use Python 2 dict copy semantics + def iteritems(self): + iteritems = SearchCommand.ConfigurationSettings.iteritems(self) + version = self.command.protocol_version + if version == 2: + iteritems = ifilter(lambda name_value1: name_value1[0] != 'distributed', iteritems) + if not self.distributed and self.type == 'streaming': + iteritems = imap( + lambda name_value: (name_value[0], 'stateful') if name_value[0] == 'type' else (name_value[0], name_value[1]), iteritems) + return iteritems + + # N.B.: Does not use Python 3 dict view semantics + if not six.PY2: + items = iteritems + + pass + # endregion + + pass + # endregion diff --git a/bin/splunklib/searchcommands/internals.py b/bin/splunklib/searchcommands/internals.py new file mode 100644 index 0000000..85f9e0f --- /dev/null +++ b/bin/splunklib/searchcommands/internals.py @@ -0,0 +1,844 @@ +# coding=utf-8 +# +# Copyright © 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
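+
+# A minimal sketch of a generating command configured to support both SCP 1 and SCP 2 on the
+# events pipeline, as described in generating_command.py above. The command and its option are
+# hypothetical and are not part of this app:
+#
+#     import sys
+#     import time
+#     from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators
+#
+#     @Configuration(type='events', retainsevents=True, streaming=False)
+#     class GenerateHelloCommand(GeneratingCommand):
+#         count = Option(require=True, validate=validators.Integer(minimum=1))
+#
+#         def generate(self):
+#             for i in range(1, self.count + 1):
+#                 yield {'_time': time.time(), '_raw': 'hello %d' % i, 'event_no': i}
+#
+#     dispatch(GenerateHelloCommand, sys.argv, sys.stdin, sys.stdout, __name__)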
+ +from __future__ import absolute_import, division, print_function + +from io import TextIOWrapper +from collections import deque, namedtuple +from splunklib import six +try: + from collections import OrderedDict # must be python 2.7 +except ImportError: + from ..ordereddict import OrderedDict +from splunklib.six.moves import StringIO +from itertools import chain +from splunklib.six.moves import map as imap +from json import JSONDecoder, JSONEncoder +from json.encoder import encode_basestring_ascii as json_encode_string +from splunklib.six.moves import urllib + +import csv +import gzip +import os +import re +import sys +import warnings + +from . import environment + +csv.field_size_limit(10485760) # The default value is 128KB; upping to 10MB. See SPL-12117 for background on this issue + + +def set_binary_mode(fh): + """ Helper method to set up binary mode for file handles. + Emphasis being sys.stdin, sys.stdout, sys.stderr. + For python3, we want to return .buffer + For python2+windows we want to set os.O_BINARY + """ + typefile = TextIOWrapper if sys.version_info >= (3, 0) else file + # check for file handle + if not isinstance(fh, typefile): + return fh + + # check for python3 and buffer + if sys.version_info >= (3, 0) and hasattr(fh, 'buffer'): + return fh.buffer + # check for python3 + elif sys.version_info >= (3, 0): + pass + # check for windows python2. SPL-175233 -- python3 stdout is already binary + elif sys.platform == 'win32': + # Work around the fact that on Windows '\n' is mapped to '\r\n'. The typical solution is to simply open files in + # binary mode, but stdout is already open, thus this hack. 'CPython' and 'PyPy' work differently. We assume that + # all other Python implementations are compatible with 'CPython'. This might or might not be a valid assumption. + from platform import python_implementation + implementation = python_implementation() + if implementation == 'PyPy': + return os.fdopen(fh.fileno(), 'wb', 0) + else: + import msvcrt + msvcrt.setmode(fh.fileno(), os.O_BINARY) + return fh + + +class CommandLineParser(object): + r""" Parses the arguments to a search command. + + A search command line is described by the following syntax. + + **Syntax**:: + + command = command-name *[wsp option] *[wsp [dquote] field-name [dquote]] + command-name = alpha *( alpha / digit ) + option = option-name [wsp] "=" [wsp] option-value + option-name = alpha *( alpha / digit / "_" ) + option-value = word / quoted-string + word = 1*( %01-%08 / %0B / %0C / %0E-1F / %21 / %23-%FF ) ; Any character but DQUOTE and WSP + quoted-string = dquote *( word / wsp / "\" dquote / dquote dquote ) dquote + field-name = ( "_" / alpha ) *( alpha / digit / "_" / "." / "-" ) + + **Note:** + + This syntax is constrained to an 8-bit character set. + + **Note:** + + This syntax does not show that `field-name` values may be comma-separated when in fact they can be. This is + because Splunk strips commas from the command line. A custom search command will never see them. + + **Example:** + + countmatches fieldname = word_count pattern = \w+ some_text_field + + Option names are mapped to properties in the targeted ``SearchCommand``. It is the responsibility of the property + setters to validate the values they receive. Property setters may also produce side effects. For example, + setting the built-in `log_level` immediately changes the `log_level`. + + """ + @classmethod + def parse(cls, command, argv): + """ Splits an argument list into an options dictionary and a fieldname + list. 
+ + The argument list, `argv`, must be of the form:: + + *[option]... *[] + + Options are validated and assigned to items in `command.options`. Field names are validated and stored in the + list of `command.fieldnames`. + + #Arguments: + + :param command: Search command instance. + :type command: ``SearchCommand`` + :param argv: List of search command arguments. + :type argv: ``list`` + :return: ``None`` + + #Exceptions: + + ``SyntaxError``: Argument list is incorrectly formed. + ``ValueError``: Unrecognized option/field name, or an illegal field value. + + """ + debug = environment.splunklib_logger.debug + command_class = type(command).__name__ + + # Prepare + + debug('Parsing %s command line: %r', command_class, argv) + command.fieldnames = None + command.options.reset() + argv = ' '.join(argv) + + command_args = cls._arguments_re.match(argv) + + if command_args is None: + raise SyntaxError('Syntax error: {}'.format(argv)) + + # Parse options + + for option in cls._options_re.finditer(command_args.group('options')): + name, value = option.group('name'), option.group('value') + if name not in command.options: + raise ValueError( + 'Unrecognized {} command option: {}={}'.format(command.name, name, json_encode_string(value))) + command.options[name].value = cls.unquote(value) + + missing = command.options.get_missing() + + if missing is not None: + if len(missing) > 1: + raise ValueError( + 'Values for these {} command options are required: {}'.format(command.name, ', '.join(missing))) + raise ValueError('A value for {} command option {} is required'.format(command.name, missing[0])) + + # Parse field names + + fieldnames = command_args.group('fieldnames') + + if fieldnames is None: + command.fieldnames = [] + else: + command.fieldnames = [cls.unquote(value.group(0)) for value in cls._fieldnames_re.finditer(fieldnames)] + + debug(' %s: %s', command_class, command) + + @classmethod + def unquote(cls, string): + """ Removes quotes from a quoted string. + + Splunk search command quote rules are applied. The enclosing double-quotes, if present, are removed. Escaped + double-quotes ('\"' or '""') are replaced by a single double-quote ('"'). + + **NOTE** + + We are not using a json.JSONDecoder because Splunk quote rules are different than JSON quote rules. A + json.JSONDecoder does not recognize a pair of double-quotes ('""') as an escaped quote ('"') and will + decode single-quoted strings ("'") in addition to double-quoted ('"') strings. 
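+
+        For example, ``unquote('"a ""b"" c"')`` returns ``a "b" c``: the enclosing double-quotes are
+        removed and each escaped pair of double-quotes collapses to a single double-quote.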
+ + """ + if len(string) == 0: + return '' + + if string[0] == '"': + if len(string) == 1 or string[-1] != '"': + raise SyntaxError('Poorly formed string literal: ' + string) + string = string[1:-1] + + if len(string) == 0: + return '' + + def replace(match): + value = match.group(0) + if value == '""': + return '"' + if len(value) < 2: + raise SyntaxError('Poorly formed string literal: ' + string) + return value[1] + + result = re.sub(cls._escaped_character_re, replace, string) + return result + + # region Class variables + + _arguments_re = re.compile(r""" + ^\s* + (?P # Match a leading set of name/value pairs + (?: + (?:(?=\w)[^\d]\w*) # name + \s*=\s* # = + (?:"(?:\\.|""|[^"])*"|(?:\\.|[^\s"])+)\s* # value + )* + )\s* + (?P # Match a trailing set of field names + (?: + (?:"(?:\\.|""|[^"])*"|(?:\\.|[^\s"])+)\s* + )* + )\s*$ + """, re.VERBOSE | re.UNICODE) + + _escaped_character_re = re.compile(r'(\\.|""|[\\"])') + + _fieldnames_re = re.compile(r"""("(?:\\.|""|[^"\\])+"|(?:\\.|[^\s"])+)""") + + _options_re = re.compile(r""" + # Captures a set of name/value pairs when used with re.finditer + (?P(?:(?=\w)[^\d]\w*)) # name + \s*=\s* # = + (?P"(?:\\.|""|[^"])*"|(?:\\.|[^\s"])+) # value + """, re.VERBOSE | re.UNICODE) + + # endregion + + +class ConfigurationSettingsType(type): + """ Metaclass for constructing ConfigurationSettings classes. + + Instances of :class:`ConfigurationSettingsType` construct :class:`ConfigurationSettings` classes from classes from + a base :class:`ConfigurationSettings` class and a dictionary of configuration settings. The settings in the + dictionary are validated against the settings in the base class. You cannot add settings, you can only change their + backing-field values and you cannot modify settings without backing-field values. These are considered fixed + configuration setting values. + + This is an internal class used in two places: + + + :meth:`decorators.Configuration.__call__` + + Adds a ConfigurationSettings attribute to a :class:`SearchCommand` class. + + + :meth:`reporting_command.ReportingCommand.fix_up` + + Adds a ConfigurationSettings attribute to a :meth:`ReportingCommand.map` method, if there is one. 
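+
+    For example, ``validate_configuration_setting(specification_matrix['type'], 'type', 'streaming')``
+    returns ``'streaming'``, whereas a value outside ``('events', 'reporting', 'streaming')`` raises
+    :exc:`ValueError` because of the constraint recorded in ``specification_matrix``.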
+ + """ + def __new__(mcs, module, name, bases): + mcs = super(ConfigurationSettingsType, mcs).__new__(mcs, str(name), bases, {}) + return mcs + + def __init__(cls, module, name, bases): + + super(ConfigurationSettingsType, cls).__init__(name, bases, None) + cls.__module__ = module + + @staticmethod + def validate_configuration_setting(specification, name, value): + if not isinstance(value, specification.type): + if isinstance(specification.type, type): + type_names = specification.type.__name__ + else: + type_names = ', '.join(imap(lambda t: t.__name__, specification.type)) + raise ValueError('Expected {} value, not {}={}'.format(type_names, name, repr(value))) + if specification.constraint and not specification.constraint(value): + raise ValueError('Illegal value: {}={}'.format(name, repr(value))) + return value + + specification = namedtuple( + 'ConfigurationSettingSpecification', ( + 'type', + 'constraint', + 'supporting_protocols')) + + # P1 [ ] TODO: Review ConfigurationSettingsType.specification_matrix for completeness and correctness + + specification_matrix = { + 'clear_required_fields': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'distributed': specification( + type=bool, + constraint=None, + supporting_protocols=[2]), + 'generates_timeorder': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'generating': specification( + type=bool, + constraint=None, + supporting_protocols=[1, 2]), + 'local': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'maxinputs': specification( + type=int, + constraint=lambda value: 0 <= value <= six.MAXSIZE, + supporting_protocols=[2]), + 'overrides_timeorder': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'required_fields': specification( + type=(list, set, tuple), + constraint=None, + supporting_protocols=[1, 2]), + 'requires_preop': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'retainsevents': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'run_in_preview': specification( + type=bool, + constraint=None, + supporting_protocols=[2]), + 'streaming': specification( + type=bool, + constraint=None, + supporting_protocols=[1]), + 'streaming_preop': specification( + type=(bytes, six.text_type), + constraint=None, + supporting_protocols=[1, 2]), + 'type': specification( + type=(bytes, six.text_type), + constraint=lambda value: value in ('events', 'reporting', 'streaming'), + supporting_protocols=[2])} + + +class CsvDialect(csv.Dialect): + """ Describes the properties of Splunk CSV streams """ + delimiter = ',' + quotechar = '"' + doublequote = True + skipinitialspace = False + lineterminator = '\r\n' + if sys.version_info >= (3, 0) and sys.platform == 'win32': + lineterminator = '\n' + quoting = csv.QUOTE_MINIMAL + + +class InputHeader(dict): + """ Represents a Splunk input header as a collection of name/value pairs. + + """ + + def __str__(self): + return '\n'.join([name + ':' + value for name, value in six.iteritems(self)]) + + def read(self, ifile): + """ Reads an input header from an input file. + + The input header is read as a sequence of ****:**** pairs separated by a newline. The end of the + input header is signalled by an empty line or an end-of-file. + + :param ifile: File-like object that supports iteration over lines. 
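+
+        For example, the lines ``alpha:one%20two`` and ``beta:three``, followed by a blank line,
+        parse to ``{'alpha': 'one two', 'beta': 'three'}``; values are URL-decoded as they are read.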
+ + """ + name, value = None, None + + for line in ifile: + if line == '\n': + break + item = line.split(':', 1) + if len(item) == 2: + # start of a new item + if name is not None: + self[name] = value[:-1] # value sans trailing newline + name, value = item[0], urllib.parse.unquote(item[1]) + elif name is not None: + # continuation of the current item + value += urllib.parse.unquote(line) + + if name is not None: + self[name] = value[:-1] if value[-1] == '\n' else value + + +Message = namedtuple('Message', ('type', 'text')) + + +class MetadataDecoder(JSONDecoder): + + def __init__(self): + JSONDecoder.__init__(self, object_hook=self._object_hook) + + @staticmethod + def _object_hook(dictionary): + + object_view = ObjectView(dictionary) + stack = deque() + stack.append((None, None, dictionary)) + + while len(stack): + instance, member_name, dictionary = stack.popleft() + + for name, value in six.iteritems(dictionary): + if isinstance(value, dict): + stack.append((dictionary, name, value)) + + if instance is not None: + instance[member_name] = ObjectView(dictionary) + + return object_view + + +class MetadataEncoder(JSONEncoder): + + def __init__(self): + JSONEncoder.__init__(self, separators=MetadataEncoder._separators) + + def default(self, o): + return o.__dict__ if isinstance(o, ObjectView) else JSONEncoder.default(self, o) + + _separators = (',', ':') + + +class ObjectView(object): + + def __init__(self, dictionary): + self.__dict__ = dictionary + + def __repr__(self): + return repr(self.__dict__) + + def __str__(self): + return str(self.__dict__) + + +class Recorder(object): + + def __init__(self, path, f): + self._recording = gzip.open(path + '.gz', 'wb') + self._file = f + + def __getattr__(self, name): + return getattr(self._file, name) + + def __iter__(self): + for line in self._file: + self._recording.write(line) + self._recording.flush() + yield line + + def read(self, size=None): + value = self._file.read() if size is None else self._file.read(size) + self._recording.write(value) + self._recording.flush() + return value + + def readline(self, size=None): + value = self._file.readline() if size is None else self._file.readline(size) + if len(value) > 0: + self._recording.write(value) + self._recording.flush() + return value + + def record(self, *args): + for arg in args: + self._recording.write(arg) + + def write(self, text): + self._recording.write(text) + self._file.write(text) + self._recording.flush() + + +class RecordWriter(object): + + def __init__(self, ofile, maxresultrows=None): + self._maxresultrows = 50000 if maxresultrows is None else maxresultrows + + self._ofile = set_binary_mode(ofile) + self._fieldnames = None + self._buffer = StringIO() + + self._writer = csv.writer(self._buffer, dialect=CsvDialect) + self._writerow = self._writer.writerow + self._finished = False + self._flushed = False + + self._inspector = OrderedDict() + self._chunk_count = 0 + self._pending_record_count = 0 + self._committed_record_count = 0 + + @property + def is_flushed(self): + return self._flushed + + @is_flushed.setter + def is_flushed(self, value): + self._flushed = True if value else False + + @property + def ofile(self): + return self._ofile + + @ofile.setter + def ofile(self, value): + self._ofile = set_binary_mode(value) + + @property + def pending_record_count(self): + return self._pending_record_count + + @property + def _record_count(self): + warnings.warn( + "_record_count will be deprecated soon. 
Use pending_record_count instead.", + PendingDeprecationWarning + ) + return self.pending_record_count + + @property + def committed_record_count(self): + return self._committed_record_count + + @property + def _total_record_count(self): + warnings.warn( + "_total_record_count will be deprecated soon. Use committed_record_count instead.", + PendingDeprecationWarning + ) + return self.committed_record_count + + def write(self, data): + bytes_type = bytes if sys.version_info >= (3, 0) else str + if not isinstance(data, bytes_type): + data = data.encode('utf-8') + self.ofile.write(data) + + def flush(self, finished=None, partial=None): + assert finished is None or isinstance(finished, bool) + assert partial is None or isinstance(partial, bool) + assert not (finished is None and partial is None) + assert finished is None or partial is None + self._ensure_validity() + + def write_message(self, message_type, message_text, *args, **kwargs): + self._ensure_validity() + self._inspector.setdefault('messages', []).append((message_type, message_text.format(*args, **kwargs))) + + def write_record(self, record): + self._ensure_validity() + self._write_record(record) + + def write_records(self, records): + self._ensure_validity() + write_record = self._write_record + for record in records: + write_record(record) + + def _clear(self): + self._buffer.seek(0) + self._buffer.truncate() + self._inspector.clear() + self._pending_record_count = 0 + + def _ensure_validity(self): + if self._finished is True: + assert self._record_count == 0 and len(self._inspector) == 0 + raise RuntimeError('I/O operation on closed record writer') + + def _write_record(self, record): + + fieldnames = self._fieldnames + + if fieldnames is None: + self._fieldnames = fieldnames = list(record.keys()) + value_list = imap(lambda fn: (str(fn), str('__mv_') + str(fn)), fieldnames) + self._writerow(list(chain.from_iterable(value_list))) + + get_value = record.get + values = [] + + for fieldname in fieldnames: + value = get_value(fieldname, None) + + if value is None: + values += (None, None) + continue + + value_t = type(value) + + if issubclass(value_t, (list, tuple)): + + if len(value) == 0: + values += (None, None) + continue + + if len(value) > 1: + value_list = value + sv = '' + mv = '$' + + for value in value_list: + + if value is None: + sv += '\n' + mv += '$;$' + continue + + value_t = type(value) + + if value_t is not bytes: + + if value_t is bool: + value = str(value.real) + elif value_t is six.text_type: + value = value + elif isinstance(value, six.integer_types) or value_t is float or value_t is complex: + value = str(value) + elif issubclass(value_t, (dict, list, tuple)): + value = str(''.join(RecordWriter._iterencode_json(value, 0))) + else: + value = repr(value).encode('utf-8', errors='backslashreplace') + + sv += value + '\n' + mv += value.replace('$', '$$') + '$;$' + + values += (sv[:-1], mv[:-2]) + continue + + value = value[0] + value_t = type(value) + + if value_t is bool: + values += (str(value.real), None) + continue + + if value_t is bytes: + values += (value, None) + continue + + if value_t is six.text_type: + if six.PY2: + value = value.encode('utf-8') + values += (value, None) + continue + + if isinstance(value, six.integer_types) or value_t is float or value_t is complex: + values += (str(value), None) + continue + + if issubclass(value_t, dict): + values += (str(''.join(RecordWriter._iterencode_json(value, 0))), None) + continue + + values += (repr(value), None) + + self._writerow(values) + 
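+        # The encoded row is now in the CSV buffer; count it as pending so that the buffer can be
+        # flushed once pending_record_count reaches maxresultrows (see the check just below).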
self._pending_record_count += 1 + + if self.pending_record_count >= self._maxresultrows: + self.flush(partial=True) + + try: + # noinspection PyUnresolvedReferences + from _json import make_encoder + except ImportError: + # We may be running under PyPy 2.5 which does not include the _json module + _iterencode_json = JSONEncoder(separators=(',', ':')).iterencode + else: + # Creating _iterencode_json this way yields a two-fold performance improvement on Python 2.7.9 and 2.7.10 + from json.encoder import encode_basestring_ascii + + @staticmethod + def _default(o): + raise TypeError(repr(o) + ' is not JSON serializable') + + _iterencode_json = make_encoder( + {}, # markers (for detecting circular references) + _default, # object_encoder + encode_basestring_ascii, # string_encoder + None, # indent + ':', ',', # separators + False, # sort_keys + False, # skip_keys + True # allow_nan + ) + + del make_encoder + + +class RecordWriterV1(RecordWriter): + + def flush(self, finished=None, partial=None): + + RecordWriter.flush(self, finished, partial) # validates arguments and the state of this instance + + if self.pending_record_count > 0 or (self._chunk_count == 0 and 'messages' in self._inspector): + + messages = self._inspector.get('messages') + + if self._chunk_count == 0: + + # Messages are written to the messages header when we write the first chunk of data + # Guarantee: These messages are displayed by splunkweb and the job inspector + + if messages is not None: + + message_level = RecordWriterV1._message_level.get + + for level, text in messages: + self.write(message_level(level, level)) + self.write('=') + self.write(text) + self.write('\r\n') + + self.write('\r\n') + + elif messages is not None: + + # Messages are written to the messages header when we write subsequent chunks of data + # Guarantee: These messages are displayed by splunkweb and the job inspector, if and only if the + # command is configured with + # + # stderr_dest = message + # + # stderr_dest is a static configuration setting. This means that it can only be set in commands.conf. + # It cannot be set in code. + + stderr = sys.stderr + + for level, text in messages: + print(level, text, file=stderr) + + self.write(self._buffer.getvalue()) + self._chunk_count += 1 + self._committed_record_count += self.pending_record_count + self._clear() + + self._finished = finished is True + + _message_level = { + 'DEBUG': 'debug_message', + 'ERROR': 'error_message', + 'FATAL': 'error_message', + 'INFO': 'info_message', + 'WARN': 'warn_message' + } + + +class RecordWriterV2(RecordWriter): + + def flush(self, finished=None, partial=None): + + RecordWriter.flush(self, finished, partial) # validates arguments and the state of this instance + + if partial or not finished: + # Don't flush partial chunks, since the SCP v2 protocol does not + # provide a way to send partial chunks yet. 
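+            # Records simply accumulate in the buffer here; they are emitted later when the search
+            # command calls write_chunk (see SearchCommand._execute_v2).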
+ return + + if not self.is_flushed: + self.write_chunk(finished=True) + + def write_chunk(self, finished=None): + inspector = self._inspector + self._committed_record_count += self.pending_record_count + self._chunk_count += 1 + + # TODO: DVPL-6448: splunklib.searchcommands | Add support for partial: true when it is implemented in + # ChunkedExternProcessor (See SPL-103525) + # + # We will need to replace the following block of code with this block: + # + # metadata = [item for item in (('inspector', inspector), ('finished', finished), ('partial', partial))] + # + # if partial is True: + # finished = False + + if len(inspector) == 0: + inspector = None + + metadata = [item for item in (('inspector', inspector), ('finished', finished))] + self._write_chunk(metadata, self._buffer.getvalue()) + self._clear() + + def write_metadata(self, configuration): + self._ensure_validity() + + metadata = chain(six.iteritems(configuration), (('inspector', self._inspector if self._inspector else None),)) + self._write_chunk(metadata, '') + self.write('\n') + self._clear() + + def write_metric(self, name, value): + self._ensure_validity() + self._inspector['metric.' + name] = value + + def _clear(self): + super(RecordWriterV2, self)._clear() + self._fieldnames = None + + def _write_chunk(self, metadata, body): + + if metadata: + metadata = str(''.join(self._iterencode_json(dict([(n, v) for n, v in metadata if v is not None]), 0))) + if sys.version_info >= (3, 0): + metadata = metadata.encode('utf-8') + metadata_length = len(metadata) + else: + metadata_length = 0 + + if sys.version_info >= (3, 0): + body = body.encode('utf-8') + body_length = len(body) + + if not (metadata_length > 0 or body_length > 0): + return + + start_line = 'chunked 1.0,%s,%s\n' % (metadata_length, body_length) + self.write(start_line) + self.write(metadata) + self.write(body) + self._ofile.flush() + self._flushed = True diff --git a/bin/splunklib/searchcommands/reporting_command.py b/bin/splunklib/searchcommands/reporting_command.py new file mode 100644 index 0000000..9470861 --- /dev/null +++ b/bin/splunklib/searchcommands/reporting_command.py @@ -0,0 +1,281 @@ +# coding=utf-8 +# +# Copyright © 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +from itertools import chain + +from .internals import ConfigurationSettingsType, json_encode_string +from .decorators import ConfigurationSetting, Option +from .streaming_command import StreamingCommand +from .search_command import SearchCommand +from .validators import Set +from splunklib import six + + +class ReportingCommand(SearchCommand): + """ Processes search result records and generates a reporting data structure. + + Reporting search commands run as either reduce or map/reduce operations. The reduce part runs on a search head and + is responsible for processing a single chunk of search results to produce the command's reporting data structure. 
+ The map part is called a streaming preop. It feeds the reduce part with partial results and by default runs on the + search head and/or one or more indexers. + + You must implement a :meth:`reduce` method as a generator function that iterates over a set of event records and + yields a reporting data structure. You may implement a :meth:`map` method as a generator function that iterates + over a set of event records and yields :class:`dict` or :class:`list(dict)` instances. + + ReportingCommand configuration + ============================== + + Configure the :meth:`map` operation using a Configuration decorator on your :meth:`map` method. Configure it like + you would a :class:`StreamingCommand`. Configure the :meth:`reduce` operation using a Configuration decorator on + your :meth:`ReportingCommand` class. + + You can configure your command for operation under Search Command Protocol (SCP) version 1 or 2. SCP 2 requires + Splunk 6.3 or later. + + """ + # region Special methods + + def __init__(self): + SearchCommand.__init__(self) + + # endregion + + # region Options + + phase = Option(doc=''' + **Syntax:** phase=[map|reduce] + + **Description:** Identifies the phase of the current map-reduce operation. + + ''', default='reduce', validate=Set('map', 'reduce')) + + # endregion + + # region Methods + + def map(self, records): + """ Override this method to compute partial results. + + :param records: + :type records: + + You must override this method, if :code:`requires_preop=True`. + + """ + return NotImplemented + + def prepare(self): + + phase = self.phase + + if phase == 'map': + # noinspection PyUnresolvedReferences + self._configuration = self.map.ConfigurationSettings(self) + return + + if phase == 'reduce': + streaming_preop = chain((self.name, 'phase="map"', str(self._options)), self.fieldnames) + self._configuration.streaming_preop = ' '.join(streaming_preop) + return + + raise RuntimeError('Unrecognized reporting command phase: {}'.format(json_encode_string(six.text_type(phase)))) + + def reduce(self, records): + """ Override this method to produce a reporting data structure. + + You must override this method. + + """ + raise NotImplementedError('reduce(self, records)') + + def _execute(self, ifile, process): + SearchCommand._execute(self, ifile, getattr(self, self.phase)) + + # endregion + + # region Types + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + """ Represents the configuration settings for a :code:`ReportingCommand`. + + """ + # region SCP v1/v2 Properties + + required_fields = ConfigurationSetting(doc=''' + List of required fields for this search which back-propagates to the generating search. + + Setting this value enables selected fields mode under SCP 2. Under SCP 1 you must also specify + :code:`clear_required_fields=True` to enable selected fields mode. To explicitly select all fields, + specify a value of :const:`['*']`. No error is generated if a specified field is missing. + + Default: :const:`None`, which implicitly selects all fields. + + Supported by: SCP 1, SCP 2 + + ''') + + requires_preop = ConfigurationSetting(doc=''' + Indicates whether :meth:`ReportingCommand.map` is required for proper command execution. + + If :const:`True`, :meth:`ReportingCommand.map` is guaranteed to be called. If :const:`False`, Splunk + considers it to be an optimization that may be skipped. 
+ + Default: :const:`False` + + Supported by: SCP 1, SCP 2 + + ''') + + streaming_preop = ConfigurationSetting(doc=''' + Denotes the requested streaming preop search string. + + Computed. + + Supported by: SCP 1, SCP 2 + + ''') + + # endregion + + # region SCP v1 Properties + + clear_required_fields = ConfigurationSetting(doc=''' + :const:`True`, if required_fields represent the *only* fields required. + + If :const:`False`, required_fields are additive to any fields that may be required by subsequent commands. + In most cases, :const:`True` is appropriate for reporting commands. + + Default: :const:`True` + + Supported by: SCP 1 + + ''') + + retainsevents = ConfigurationSetting(readonly=True, value=False, doc=''' + Signals that :meth:`ReportingCommand.reduce` transforms _raw events to produce a reporting data structure. + + Fixed: :const:`False` + + Supported by: SCP 1 + + ''') + + streaming = ConfigurationSetting(readonly=True, value=False, doc=''' + Signals that :meth:`ReportingCommand.reduce` runs on the search head. + + Fixed: :const:`False` + + Supported by: SCP 1 + + ''') + + # endregion + + # region SCP v2 Properties + + maxinputs = ConfigurationSetting(doc=''' + Specifies the maximum number of events that can be passed to the command for each invocation. + + This limit cannot exceed the value of `maxresultrows` in limits.conf_. Under SCP 1 you must specify this + value in commands.conf_. + + Default: The value of `maxresultrows`. + + Supported by: SCP 2 + + .. _limits.conf: http://docs.splunk.com/Documentation/Splunk/latest/admin/Limitsconf + + ''') + + run_in_preview = ConfigurationSetting(doc=''' + :const:`True`, if this command should be run to generate results for preview; not wait for final output. + + This may be important for commands that have side effects (e.g., outputlookup). + + Default: :const:`True` + + Supported by: SCP 2 + + ''') + + type = ConfigurationSetting(readonly=True, value='reporting', doc=''' + Command type name. + + Fixed: :const:`'reporting'`. + + Supported by: SCP 2 + + ''') + + # endregion + + # region Methods + + @classmethod + def fix_up(cls, command): + """ Verifies :code:`command` class structure and configures the :code:`command.map` method. + + Verifies that :code:`command` derives from :class:`ReportingCommand` and overrides + :code:`ReportingCommand.reduce`. It then configures :code:`command.reduce`, if an overriding implementation + of :code:`ReportingCommand.reduce` has been provided. + + :param command: :code:`ReportingCommand` class + + Exceptions: + + :code:`TypeError` :code:`command` class is not derived from :code:`ReportingCommand` + :code:`AttributeError` No :code:`ReportingCommand.reduce` override + + """ + if not issubclass(command, ReportingCommand): + raise TypeError('{} is not a ReportingCommand'.format( command)) + + if command.reduce == ReportingCommand.reduce: + raise AttributeError('No ReportingCommand.reduce override') + + if command.map == ReportingCommand.map: + cls._requires_preop = False + return + + f = vars(command)['map'] # Function backing the map method + + # EXPLANATION OF PREVIOUS STATEMENT: There is no way to add custom attributes to methods. See [Why does + # setattr fail on a method](http://stackoverflow.com/questions/7891277/why-does-setattr-fail-on-a-bound-method) for a discussion of this issue. 
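+
+            # f is the plain function object from the class dictionary, so attributes such as _settings
+            # (attached by a Configuration decorator on the map method, if one was used) can be read from
+            # and assigned to it directly.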
+ + try: + settings = f._settings + except AttributeError: + f.ConfigurationSettings = StreamingCommand.ConfigurationSettings + return + + # Create new StreamingCommand.ConfigurationSettings class + + module = command.__module__ + '.' + command.__name__ + '.map' + name = b'ConfigurationSettings' + bases = (StreamingCommand.ConfigurationSettings,) + + f.ConfigurationSettings = ConfigurationSettingsType(module, name, bases) + ConfigurationSetting.fix_up(f.ConfigurationSettings, settings) + del f._settings + + pass + # endregion + + pass + # endregion diff --git a/bin/splunklib/searchcommands/search_command.py b/bin/splunklib/searchcommands/search_command.py new file mode 100644 index 0000000..7383a5e --- /dev/null +++ b/bin/splunklib/searchcommands/search_command.py @@ -0,0 +1,1127 @@ +# coding=utf-8 +# +# Copyright © 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +# Absolute imports + +from collections import namedtuple + +import io + +try: + from collections import OrderedDict # must be python 2.7 +except ImportError: + from ..ordereddict import OrderedDict +from copy import deepcopy +from splunklib.six.moves import StringIO +from itertools import chain, islice +from splunklib.six.moves import filter as ifilter, map as imap, zip as izip +from splunklib import six +if six.PY2: + from logging import _levelNames, getLevelName, getLogger +else: + from logging import _nameToLevel as _levelNames, getLevelName, getLogger +try: + from shutil import make_archive +except ImportError: + # Used for recording, skip on python 2.6 + pass +from time import time +from splunklib.six.moves.urllib.parse import unquote +from splunklib.six.moves.urllib.parse import urlsplit +from warnings import warn +from xml.etree import ElementTree + +import os +import sys +import re +import csv +import tempfile +import traceback + +# Relative imports + +from .internals import ( + CommandLineParser, + CsvDialect, + InputHeader, + Message, + MetadataDecoder, + MetadataEncoder, + ObjectView, + Recorder, + RecordWriterV1, + RecordWriterV2, + json_encode_string) + +from . import Boolean, Option, environment +from ..client import Service + + +# ---------------------------------------------------------------------------------------------------------------------- + +# P1 [ ] TODO: Log these issues against ChunkedExternProcessor +# +# 1. Implement requires_preop configuration setting. +# This configuration setting is currently rejected by ChunkedExternProcessor. +# +# 2. Rename type=events as type=eventing for symmetry with type=reporting and type=streaming +# Eventing commands process records on the events pipeline. This change effects ChunkedExternProcessor.cpp, +# eventing_command.py, and generating_command.py. +# +# 3. For consistency with SCPV1, commands.conf should not require filename setting when chunked = true +# The SCPV1 processor uses .py as the default filename. The ChunkedExternProcessor should do the same. 
+
+# P1 [ ] TODO: Verify that ChunkedExternProcessor complains if a streaming_preop has a type other than 'streaming'
+# It once looked like sending type='reporting' for the streaming_preop was accepted.
+
+# ----------------------------------------------------------------------------------------------------------------------
+
+# P2 [ ] TODO: Consider bumping None formatting up to Option.Item.__str__
+
+
+class SearchCommand(object):
+    """ Represents a custom search command.
+
+    """
+
+    def __init__(self):
+
+        # Variables that may be used, but not altered by derived classes
+
+        class_name = self.__class__.__name__
+
+        self._logger, self._logging_configuration = getLogger(class_name), environment.logging_configuration
+
+        # Variables backing option/property values
+
+        self._configuration = self.ConfigurationSettings(self)
+        self._input_header = InputHeader()
+        self._fieldnames = None
+        self._finished = None
+        self._metadata = None
+        self._options = None
+        self._protocol_version = None
+        self._search_results_info = None
+        self._service = None
+
+        # Internal variables
+
+        self._default_logging_level = self._logger.level
+        self._record_writer = None
+        self._records = None
+
+    def __str__(self):
+        text = ' '.join(chain((type(self).name, str(self.options)), [] if self.fieldnames is None else self.fieldnames))
+        return text
+
+    # region Options
+
+    @Option
+    def logging_configuration(self):
+        """ **Syntax:** logging_configuration=<path>
+
+        **Description:** Loads an alternative logging configuration file for
+        a command invocation. The logging configuration file must be in Python
+        ConfigParser-format. Path names are relative to the app root directory.
+
+        """
+        return self._logging_configuration
+
+    @logging_configuration.setter
+    def logging_configuration(self, value):
+        self._logger, self._logging_configuration = environment.configure_logging(self.__class__.__name__, value)
+
+    @Option
+    def logging_level(self):
+        """ **Syntax:** logging_level=[CRITICAL|ERROR|WARNING|INFO|DEBUG|NOTSET]
+
+        **Description:** Sets the threshold for the logger of this command invocation. Logging messages less severe than
+        `logging_level` will be ignored.
+
+        """
+        return getLevelName(self._logger.getEffectiveLevel())
+
+    @logging_level.setter
+    def logging_level(self, value):
+        if value is None:
+            value = self._default_logging_level
+        if isinstance(value, (bytes, six.text_type)):
+            try:
+                level = _levelNames[value.upper()]
+            except KeyError:
+                raise ValueError('Unrecognized logging level: {}'.format(value))
+        else:
+            try:
+                level = int(value)
+            except ValueError:
+                raise ValueError('Unrecognized logging level: {}'.format(value))
+        self._logger.setLevel(level)
+
+    record = Option(doc='''
+        **Syntax:** record=<bool>
+
+        **Description:** When `true`, records the interaction between the command and splunkd. Defaults to `false`.
+
+        ''', default=False, validate=Boolean())
+
+    show_configuration = Option(doc='''
+        **Syntax:** show_configuration=<bool>
+
+        **Description:** When `true`, reports command configuration as an informational message. Defaults to `false`.
+
+        ''', default=False, validate=Boolean())
+
+    # endregion
+
+    # region Properties
+
+    @property
+    def configuration(self):
+        """ Returns the configuration settings for this command.
+
+        """
+        return self._configuration
+
+    @property
+    def fieldnames(self):
+        """ Returns the fieldnames specified as argument to this command.
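+
+        For example (illustrative), in ``| somecommand option=value field_1 field_2`` the fieldnames are
+        ``['field_1', 'field_2']``: every argument that is not a ``name=value`` option is treated as a fieldname.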
+ + """ + return self._fieldnames + + @fieldnames.setter + def fieldnames(self, value): + self._fieldnames = value + + @property + def input_header(self): + """ Returns the input header for this command. + + :return: The input header for this command. + :rtype: InputHeader + + """ + warn( + 'SearchCommand.input_header is deprecated and will be removed in a future release. ' + 'Please use SearchCommand.metadata instead.', DeprecationWarning, 2) + return self._input_header + + @property + def logger(self): + """ Returns the logger for this command. + + :return: The logger for this command. + :rtype: + + """ + return self._logger + + @property + def metadata(self): + return self._metadata + + @property + def options(self): + """ Returns the options specified as argument to this command. + + """ + if self._options is None: + self._options = Option.View(self) + return self._options + + @property + def protocol_version(self): + return self._protocol_version + + @property + def search_results_info(self): + """ Returns the search results info for this command invocation. + + The search results info object is created from the search results info file associated with the command + invocation. + + :return: Search results info:const:`None`, if the search results info file associated with the command + invocation is inaccessible. + :rtype: SearchResultsInfo or NoneType + + """ + if self._search_results_info is not None: + return self._search_results_info + + if self._protocol_version == 1: + try: + path = self._input_header['infoPath'] + except KeyError: + return None + else: + assert self._protocol_version == 2 + + try: + dispatch_dir = self._metadata.searchinfo.dispatch_dir + except AttributeError: + return None + + path = os.path.join(dispatch_dir, 'info.csv') + + try: + with io.open(path, 'r') as f: + reader = csv.reader(f, dialect=CsvDialect) + fields = next(reader) + values = next(reader) + except IOError as error: + if error.errno == 2: + self.logger.error('Search results info file {} does not exist.'.format(json_encode_string(path))) + return + raise + + def convert_field(field): + return (field[1:] if field[0] == '_' else field).replace('.', '_') + + decode = MetadataDecoder().decode + + def convert_value(value): + try: + return decode(value) if len(value) > 0 else value + except ValueError: + return value + + info = ObjectView(dict(imap(lambda f_v: (convert_field(f_v[0]), convert_value(f_v[1])), izip(fields, values)))) + + try: + count_map = info.countMap + except AttributeError: + pass + else: + count_map = count_map.split(';') + n = len(count_map) + info.countMap = dict(izip(islice(count_map, 0, n, 2), islice(count_map, 1, n, 2))) + + try: + msg_type = info.msgType + msg_text = info.msg + except AttributeError: + pass + else: + messages = ifilter(lambda t_m: t_m[0] or t_m[1], izip(msg_type.split('\n'), msg_text.split('\n'))) + info.msg = [Message(message) for message in messages] + del info.msgType + + try: + info.vix_families = ElementTree.fromstring(info.vix_families) + except AttributeError: + pass + + self._search_results_info = info + return info + + @property + def service(self): + """ Returns a Splunk service object for this command invocation or None. + + The service object is created from the Splunkd URI and authentication token passed to the command invocation in + the search results info file. This data is not passed to a command invocation by default. You must request it by + specifying this pair of configuration settings in commands.conf: + + .. 
code-block:: python + + enableheader = true + requires_srinfo = true + + The :code:`enableheader` setting is :code:`true` by default. Hence, you need not set it. The + :code:`requires_srinfo` setting is false by default. Hence, you must set it. + + :return: :class:`splunklib.client.Service`, if :code:`enableheader` and :code:`requires_srinfo` are both + :code:`true`. Otherwise, if either :code:`enableheader` or :code:`requires_srinfo` are :code:`false`, a value + of :code:`None` is returned. + + """ + if self._service is not None: + return self._service + + metadata = self._metadata + + if metadata is None: + return None + + try: + searchinfo = self._metadata.searchinfo + except AttributeError: + return None + + splunkd_uri = searchinfo.splunkd_uri + + if splunkd_uri is None: + return None + + uri = urlsplit(splunkd_uri, allow_fragments=False) + + self._service = Service( + scheme=uri.scheme, host=uri.hostname, port=uri.port, app=searchinfo.app, token=searchinfo.session_key) + + return self._service + + # endregion + + # region Methods + + def error_exit(self, error, message=None): + self.write_error(error.message if message is None else message) + self.logger.error('Abnormal exit: %s', error) + exit(1) + + def finish(self): + """ Flushes the output buffer and signals that this command has finished processing data. + + :return: :const:`None` + + """ + self._record_writer.flush(finished=True) + + def flush(self): + """ Flushes the output buffer. + + :return: :const:`None` + + """ + self._record_writer.flush(finished=False) + + def prepare(self): + """ Prepare for execution. + + This method should be overridden in search command classes that wish to examine and update their configuration + or option settings prior to execution. It is called during the getinfo exchange before command metadata is sent + to splunkd. + + :return: :const:`None` + :rtype: NoneType + + """ + pass + + def process(self, argv=sys.argv, ifile=sys.stdin, ofile=sys.stdout): + """ Process data. + + :param argv: Command line arguments. + :type argv: list or tuple + + :param ifile: Input data file. + :type ifile: file + + :param ofile: Output data file. 
+ :type ofile: file + + :return: :const:`None` + :rtype: NoneType + + """ + if len(argv) > 1: + self._process_protocol_v1(argv, ifile, ofile) + else: + self._process_protocol_v2(argv, ifile, ofile) + + def _map_input_header(self): + metadata = self._metadata + searchinfo = metadata.searchinfo + self._input_header.update( + allowStream=None, + infoPath=os.path.join(searchinfo.dispatch_dir, 'info.csv'), + keywords=None, + preview=metadata.preview, + realtime=searchinfo.earliest_time != 0 and searchinfo.latest_time != 0, + search=searchinfo.search, + sid=searchinfo.sid, + splunkVersion=searchinfo.splunk_version, + truncated=None) + + def _map_metadata(self, argv): + source = SearchCommand._MetadataSource(argv, self._input_header, self.search_results_info) + + def _map(metadata_map): + metadata = {} + + for name, value in six.iteritems(metadata_map): + if isinstance(value, dict): + value = _map(value) + else: + transform, extract = value + if extract is None: + value = None + else: + value = extract(source) + if not (value is None or transform is None): + value = transform(value) + metadata[name] = value + + return ObjectView(metadata) + + self._metadata = _map(SearchCommand._metadata_map) + + _metadata_map = { + 'action': + (lambda v: 'getinfo' if v == '__GETINFO__' else 'execute' if v == '__EXECUTE__' else None, lambda s: s.argv[1]), + 'preview': + (bool, lambda s: s.input_header.get('preview')), + 'searchinfo': { + 'app': + (lambda v: v.ppc_app, lambda s: s.search_results_info), + 'args': + (None, lambda s: s.argv), + 'dispatch_dir': + (os.path.dirname, lambda s: s.input_header.get('infoPath')), + 'earliest_time': + (lambda v: float(v.rt_earliest) if len(v.rt_earliest) > 0 else 0.0, lambda s: s.search_results_info), + 'latest_time': + (lambda v: float(v.rt_latest) if len(v.rt_latest) > 0 else 0.0, lambda s: s.search_results_info), + 'owner': + (None, None), + 'raw_args': + (None, lambda s: s.argv), + 'search': + (unquote, lambda s: s.input_header.get('search')), + 'session_key': + (lambda v: v.auth_token, lambda s: s.search_results_info), + 'sid': + (None, lambda s: s.input_header.get('sid')), + 'splunk_version': + (None, lambda s: s.input_header.get('splunkVersion')), + 'splunkd_uri': + (lambda v: v.splunkd_uri, lambda s: s.search_results_info), + 'username': + (lambda v: v.ppc_user, lambda s: s.search_results_info)}} + + _MetadataSource = namedtuple('Source', ('argv', 'input_header', 'search_results_info')) + + def _prepare_protocol_v1(self, argv, ifile, ofile): + + debug = environment.splunklib_logger.debug + + # Provide as much context as possible in advance of parsing the command line and preparing for execution + + self._input_header.read(ifile) + self._protocol_version = 1 + self._map_metadata(argv) + + debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) + + try: + tempfile.tempdir = self._metadata.searchinfo.dispatch_dir + except AttributeError: + raise RuntimeError('{}.metadata.searchinfo.dispatch_dir is undefined'.format(self.__class__.__name__)) + + debug(' tempfile.tempdir=%r', tempfile.tempdir) + + CommandLineParser.parse(self, argv[2:]) + self.prepare() + + if self.record: + self.record = False + + record_argv = [argv[0], argv[1], str(self._options), ' '.join(self.fieldnames)] + ifile, ofile = self._prepare_recording(record_argv, ifile, ofile) + self._record_writer.ofile = ofile + ifile.record(str(self._input_header), '\n\n') + + if self.show_configuration: + self.write_info(self.name + ' command configuration: ' + str(self._configuration)) + + return 
ifile # wrapped, if self.record is True + + def _prepare_recording(self, argv, ifile, ofile): + + # Create the recordings directory, if it doesn't already exist + + recordings = os.path.join(environment.splunk_home, 'var', 'run', 'splunklib.searchcommands', 'recordings') + + if not os.path.isdir(recordings): + os.makedirs(recordings) + + # Create input/output recorders from ifile and ofile + + recording = os.path.join(recordings, self.__class__.__name__ + '-' + repr(time()) + '.' + self._metadata.action) + ifile = Recorder(recording + '.input', ifile) + ofile = Recorder(recording + '.output', ofile) + + # Archive the dispatch directory--if it exists--so that it can be used as a baseline in mocks) + + dispatch_dir = self._metadata.searchinfo.dispatch_dir + + if dispatch_dir is not None: # __GETINFO__ action does not include a dispatch_dir + root_dir, base_dir = os.path.split(dispatch_dir) + make_archive(recording + '.dispatch_dir', 'gztar', root_dir, base_dir, logger=self.logger) + + # Save a splunk command line because it is useful for developing tests + + with open(recording + '.splunk_cmd', 'wb') as f: + f.write('splunk cmd python '.encode()) + f.write(os.path.basename(argv[0]).encode()) + for arg in islice(argv, 1, len(argv)): + f.write(' '.encode()) + f.write(arg.encode()) + + return ifile, ofile + + def _process_protocol_v1(self, argv, ifile, ofile): + + debug = environment.splunklib_logger.debug + class_name = self.__class__.__name__ + + debug('%s.process started under protocol_version=1', class_name) + self._record_writer = RecordWriterV1(ofile) + + # noinspection PyBroadException + try: + if argv[1] == '__GETINFO__': + + debug('Writing configuration settings') + + ifile = self._prepare_protocol_v1(argv, ifile, ofile) + self._record_writer.write_record(dict( + (n, ','.join(v) if isinstance(v, (list, tuple)) else v) for n, v in six.iteritems(self._configuration))) + self.finish() + + elif argv[1] == '__EXECUTE__': + + debug('Executing') + + ifile = self._prepare_protocol_v1(argv, ifile, ofile) + self._records = self._records_protocol_v1 + self._metadata.action = 'execute' + self._execute(ifile, None) + + else: + message = ( + 'Command {0} appears to be statically configured for search command protocol version 1 and static ' + 'configuration is unsupported by splunklib.searchcommands. Please ensure that ' + 'default/commands.conf contains this stanza:\n' + '[{0}]\n' + 'filename = {1}\n' + 'enableheader = true\n' + 'outputheader = true\n' + 'requires_srinfo = true\n' + 'supports_getinfo = true\n' + 'supports_multivalues = true\n' + 'supports_rawargs = true'.format(self.name, os.path.basename(argv[0]))) + raise RuntimeError(message) + + except (SyntaxError, ValueError) as error: + self.write_error(six.text_type(error)) + self.flush() + exit(0) + + except SystemExit: + self.flush() + raise + + except: + self._report_unexpected_error() + self.flush() + exit(1) + + debug('%s.process finished under protocol_version=1', class_name) + + def _process_protocol_v2(self, argv, ifile, ofile): + """ Processes records on the `input stream optionally writing records to the output stream. + + :param ifile: Input file object. + :type ifile: file or InputType + + :param ofile: Output file object. 
+ :type ofile: file or OutputType + + :return: :const:`None` + + """ + debug = environment.splunklib_logger.debug + class_name = self.__class__.__name__ + + debug('%s.process started under protocol_version=2', class_name) + self._protocol_version = 2 + + # Read search command metadata from splunkd + # noinspection PyBroadException + try: + debug('Reading metadata') + metadata, body = self._read_chunk(self._as_binary_stream(ifile)) + + action = getattr(metadata, 'action', None) + + if action != 'getinfo': + raise RuntimeError('Expected getinfo action, not {}'.format(action)) + + if len(body) > 0: + raise RuntimeError('Did not expect data for getinfo action') + + self._metadata = deepcopy(metadata) + + searchinfo = self._metadata.searchinfo + + searchinfo.earliest_time = float(searchinfo.earliest_time) + searchinfo.latest_time = float(searchinfo.latest_time) + searchinfo.search = unquote(searchinfo.search) + + self._map_input_header() + + debug(' metadata=%r, input_header=%r', self._metadata, self._input_header) + + try: + tempfile.tempdir = self._metadata.searchinfo.dispatch_dir + except AttributeError: + raise RuntimeError('%s.metadata.searchinfo.dispatch_dir is undefined'.format(class_name)) + + debug(' tempfile.tempdir=%r', tempfile.tempdir) + except: + self._record_writer = RecordWriterV2(ofile) + self._report_unexpected_error() + self.finish() + exit(1) + + # Write search command configuration for consumption by splunkd + # noinspection PyBroadException + try: + self._record_writer = RecordWriterV2(ofile, getattr(self._metadata.searchinfo, 'maxresultrows', None)) + self.fieldnames = [] + self.options.reset() + + args = self.metadata.searchinfo.args + error_count = 0 + + debug('Parsing arguments') + + if args and type(args) == list: + for arg in args: + result = arg.split('=', 1) + if len(result) == 1: + self.fieldnames.append(str(result[0])) + else: + name, value = result + name = str(name) + try: + option = self.options[name] + except KeyError: + self.write_error('Unrecognized option: {}={}'.format(name, value)) + error_count += 1 + continue + try: + option.value = value + except ValueError: + self.write_error('Illegal value: {}={}'.format(name, value)) + error_count += 1 + continue + + missing = self.options.get_missing() + + if missing is not None: + if len(missing) == 1: + self.write_error('A value for "{}" is required'.format(missing[0])) + else: + self.write_error('Values for these required options are missing: {}'.format(', '.join(missing))) + error_count += 1 + + if error_count > 0: + exit(1) + + debug(' command: %s', six.text_type(self)) + + debug('Preparing for execution') + self.prepare() + + if self.record: + + ifile, ofile = self._prepare_recording(argv, ifile, ofile) + self._record_writer.ofile = ofile + + # Record the metadata that initiated this command after removing the record option from args/raw_args + + info = self._metadata.searchinfo + + for attr in 'args', 'raw_args': + setattr(info, attr, [arg for arg in getattr(info, attr) if not arg.startswith('record=')]) + + metadata = MetadataEncoder().encode(self._metadata) + ifile.record('chunked 1.0,', six.text_type(len(metadata)), ',0\n', metadata) + + if self.show_configuration: + self.write_info(self.name + ' command configuration: ' + str(self._configuration)) + + debug(' command configuration: %s', self._configuration) + + except SystemExit: + self._record_writer.write_metadata(self._configuration) + self.finish() + raise + except: + self._record_writer.write_metadata(self._configuration) + 
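+            # The configuration metadata has been sent above so that splunkd still receives a well-formed
+            # reply to the getinfo exchange; now report the error and exit.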
self._report_unexpected_error()
+            self.finish()
+            exit(1)
+
+        self._record_writer.write_metadata(self._configuration)
+
+        # Execute search command on data passing through the pipeline
+        # noinspection PyBroadException
+        try:
+            debug('Executing under protocol_version=2')
+            self._metadata.action = 'execute'
+            self._execute(ifile, None)
+        except SystemExit:
+            self.finish()
+            raise
+        except:
+            self._report_unexpected_error()
+            self.finish()
+            exit(1)
+
+        debug('%s.process completed', class_name)
+
+    def write_debug(self, message, *args):
+        self._record_writer.write_message('DEBUG', message, *args)
+
+    def write_error(self, message, *args):
+        self._record_writer.write_message('ERROR', message, *args)
+
+    def write_fatal(self, message, *args):
+        self._record_writer.write_message('FATAL', message, *args)
+
+    def write_info(self, message, *args):
+        self._record_writer.write_message('INFO', message, *args)
+
+    def write_warning(self, message, *args):
+        self._record_writer.write_message('WARN', message, *args)
+
+    def write_metric(self, name, value):
+        """ Writes a metric that will be added to the search inspector.
+
+        :param name: Name of the metric.
+        :type name: basestring
+
+        :param value: A 4-tuple containing the value of metric ``name`` where
+
+            value[0] = Elapsed seconds or :const:`None`.
+            value[1] = Number of invocations or :const:`None`.
+            value[2] = Input count or :const:`None`.
+            value[3] = Output count or :const:`None`.
+
+        The :data:`SearchMetric` type provides a convenient encapsulation of ``value``.
+
+        :return: :const:`None`.
+
+        """
+        self._record_writer.write_metric(name, value)
+
+    # P2 [ ] TODO: Support custom inspector values
+
+    @staticmethod
+    def _decode_list(mv):
+        return [match.replace('$$', '$') for match in SearchCommand._encoded_value.findall(mv)]
+
+    _encoded_value = re.compile(r'\$(?P<item>(?:\$\$|[^$])*)\$(?:;|$)')  # matches a single value in an encoded list
+
+    # Note: Subclasses must override this method so that it can be called as self._execute(ifile, None)
+    def _execute(self, ifile, process):
+        """ Default processing loop
+
+        :param ifile: Input file object.
+        :type ifile: file
+
+        :param process: Bound method to call in processing loop.
+        :type process: instancemethod
+
+        :return: :const:`None`.
+ :rtype: NoneType + + """ + if self.protocol_version == 1: + self._record_writer.write_records(process(self._records(ifile))) + self.finish() + else: + assert self._protocol_version == 2 + self._execute_v2(ifile, process) + + @staticmethod + def _as_binary_stream(ifile): + naught = ifile.read(0) + if isinstance(naught, bytes): + return ifile + + try: + return ifile.buffer + except AttributeError as error: + raise RuntimeError('Failed to get underlying buffer: {}'.format(error)) + + @staticmethod + def _read_chunk(istream): + # noinspection PyBroadException + assert isinstance(istream.read(0), six.binary_type), 'Stream must be binary' + + try: + header = istream.readline() + except Exception as error: + raise RuntimeError('Failed to read transport header: {}'.format(error)) + + if not header: + return None + + match = SearchCommand._header.match(six.ensure_str(header)) + + if match is None: + raise RuntimeError('Failed to parse transport header: {}'.format(header)) + + metadata_length, body_length = match.groups() + metadata_length = int(metadata_length) + body_length = int(body_length) + + try: + metadata = istream.read(metadata_length) + except Exception as error: + raise RuntimeError('Failed to read metadata of length {}: {}'.format(metadata_length, error)) + + decoder = MetadataDecoder() + + try: + metadata = decoder.decode(six.ensure_str(metadata)) + except Exception as error: + raise RuntimeError('Failed to parse metadata of length {}: {}'.format(metadata_length, error)) + + # if body_length <= 0: + # return metadata, '' + + body = "" + try: + if body_length > 0: + body = istream.read(body_length) + except Exception as error: + raise RuntimeError('Failed to read body of length {}: {}'.format(body_length, error)) + + return metadata, six.ensure_str(body) + + _header = re.compile(r'chunked\s+1.0\s*,\s*(\d+)\s*,\s*(\d+)\s*\n') + + def _records_protocol_v1(self, ifile): + return self._read_csv_records(ifile) + + def _read_csv_records(self, ifile): + reader = csv.reader(ifile, dialect=CsvDialect) + + try: + fieldnames = next(reader) + except StopIteration: + return + + mv_fieldnames = dict([(name, name[len('__mv_'):]) for name in fieldnames if name.startswith('__mv_')]) + + if len(mv_fieldnames) == 0: + for values in reader: + yield OrderedDict(izip(fieldnames, values)) + return + + for values in reader: + record = OrderedDict() + for fieldname, value in izip(fieldnames, values): + if fieldname.startswith('__mv_'): + if len(value) > 0: + record[mv_fieldnames[fieldname]] = self._decode_list(value) + elif fieldname not in record: + record[fieldname] = value + yield record + + def _execute_v2(self, ifile, process): + istream = self._as_binary_stream(ifile) + + while True: + result = self._read_chunk(istream) + + if not result: + return + + metadata, body = result + action = getattr(metadata, 'action', None) + if action != 'execute': + raise RuntimeError('Expected execute action, not {}'.format(action)) + + self._finished = getattr(metadata, 'finished', False) + self._record_writer.is_flushed = False + + self._execute_chunk_v2(process, result) + + self._record_writer.write_chunk(finished=self._finished) + + def _execute_chunk_v2(self, process, chunk): + metadata, body = chunk + + if len(body) <= 0: + return + + records = self._read_csv_records(StringIO(body)) + self._record_writer.write_records(process(records)) + + + def _report_unexpected_error(self): + + error_type, error, tb = sys.exc_info() + origin = tb + + while origin.tb_next is not None: + origin = origin.tb_next + + filename = 
origin.tb_frame.f_code.co_filename + lineno = origin.tb_lineno + message = '{0} at "{1}", line {2:d} : {3}'.format(error_type.__name__, filename, lineno, error) + + environment.splunklib_logger.error(message + '\nTraceback:\n' + ''.join(traceback.format_tb(tb))) + self.write_error(message) + + # endregion + + # region Types + + class ConfigurationSettings(object): + """ Represents the configuration settings common to all :class:`SearchCommand` classes. + + """ + def __init__(self, command): + self.command = command + + def __repr__(self): + """ Converts the value of this instance to its string representation. + + The value of this ConfigurationSettings instance is represented as a string of comma-separated + :code:`(name, value)` pairs. + + :return: String representation of this instance + + """ + definitions = type(self).configuration_setting_definitions + settings = imap( + lambda setting: repr((setting.name, setting.__get__(self), setting.supporting_protocols)), definitions) + return '[' + ', '.join(settings) + ']' + + def __str__(self): + """ Converts the value of this instance to its string representation. + + The value of this ConfigurationSettings instance is represented as a string of comma-separated + :code:`name=value` pairs. Items with values of :const:`None` are filtered from the list. + + :return: String representation of this instance + + """ + #text = ', '.join(imap(lambda (name, value): name + '=' + json_encode_string(unicode(value)), self.iteritems())) + text = ', '.join(['{}={}'.format(name, json_encode_string(six.text_type(value))) for (name, value) in six.iteritems(self)]) + return text + + # region Methods + + @classmethod + def fix_up(cls, command_class): + """ Adjusts and checks this class and its search command class. + + Derived classes typically override this method. It is used by the :decorator:`Configuration` decorator to + fix up the :class:`SearchCommand` class it adorns. This method is overridden by :class:`EventingCommand`, + :class:`GeneratingCommand`, :class:`ReportingCommand`, and :class:`StreamingCommand`, the base types for + all other search commands. + + :param command_class: Command class targeted by this class + + """ + return + + # TODO: Stop looking like a dictionary because we don't obey the semantics + # N.B.: Does not use Python 2 dict copy semantics + def iteritems(self): + definitions = type(self).configuration_setting_definitions + version = self.command.protocol_version + return ifilter( + lambda name_value1: name_value1[1] is not None, imap( + lambda setting: (setting.name, setting.__get__(self)), ifilter( + lambda setting: setting.is_supported_by_protocol(version), definitions))) + + # N.B.: Does not use Python 3 dict view semantics + if not six.PY2: + items = iteritems + + pass # endregion + + pass # endregion + + +SearchMetric = namedtuple('SearchMetric', ('elapsed_seconds', 'invocation_count', 'input_count', 'output_count')) + + + +def dispatch(command_class, argv=sys.argv, input_file=sys.stdin, output_file=sys.stdout, module_name=None): + """ Instantiates and executes a search command class + + This function implements a `conditional script stanza `_ based on the value of + :code:`module_name`:: + + if module_name is None or module_name == '__main__': + # execute command + + Call this function at module scope with :code:`module_name=__name__`, if you would like your module to act as either + a reusable module or a standalone program. 
Otherwise, if you wish this function to unconditionally instantiate and + execute :code:`command_class`, pass :const:`None` as the value of :code:`module_name`. + + :param command_class: Search command class to instantiate and execute. + :type command_class: type + :param argv: List of arguments to the command. + :type argv: list or tuple + :param input_file: File from which the command will read data. + :type input_file: :code:`file` + :param output_file: File to which the command will write data. + :type output_file: :code:`file` + :param module_name: Name of the module calling :code:`dispatch` or :const:`None`. + :type module_name: :code:`basestring` + :returns: :const:`None` + + **Example** + + .. code-block:: python + :linenos: + + #!/usr/bin/env python + from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators + @Configuration() + class SomeStreamingCommand(StreamingCommand): + ... + def stream(records): + ... + dispatch(SomeStreamingCommand, module_name=__name__) + + Dispatches the :code:`SomeStreamingCommand`, if and only if :code:`__name__` is equal to :code:`'__main__'`. + + **Example** + + .. code-block:: python + :linenos: + + from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators + @Configuration() + class SomeStreamingCommand(StreamingCommand): + ... + def stream(records): + ... + dispatch(SomeStreamingCommand) + + Unconditionally dispatches :code:`SomeStreamingCommand`. + + """ + assert issubclass(command_class, SearchCommand) + + if module_name is None or module_name == '__main__': + command_class().process(argv, input_file, output_file) diff --git a/bin/splunklib/searchcommands/streaming_command.py b/bin/splunklib/searchcommands/streaming_command.py new file mode 100644 index 0000000..fa075ed --- /dev/null +++ b/bin/splunklib/searchcommands/streaming_command.py @@ -0,0 +1,195 @@ +# coding=utf-8 +# +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +from splunklib import six +from splunklib.six.moves import map as imap, filter as ifilter + +from .decorators import ConfigurationSetting +from .search_command import SearchCommand + + +class StreamingCommand(SearchCommand): + """ Applies a transformation to search results as they travel through the streams pipeline. + + Streaming commands typically filter, augment, or update, search result records. Splunk will send them in batches of + up to 50,000 records. Hence, a search command must be prepared to be invoked many times during the course of + pipeline processing. Each invocation should produce a set of results independently usable by downstream processors. + + By default Splunk may choose to run a streaming command locally on a search head and/or remotely on one or more + indexers concurrently. The size and frequency of the search result batches sent to the command will vary based + on scheduling considerations. 
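+
+    A minimal sketch of a streaming command (the command class, option, and field names below are illustrative
+    only, not part of this library)::
+
+        from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
+        from splunklib import six
+
+        @Configuration()
+        class ExampleStreamingCommand(StreamingCommand):
+
+            fieldname = Option(require=True, validate=validators.Fieldname())
+
+            def stream(self, records):
+                for record in records:
+                    # Annotate each record with the length of the selected field and pass it on.
+                    record['field_length'] = len(six.text_type(record.get(self.fieldname, '')))
+                    yield record
+
+        dispatch(ExampleStreamingCommand, module_name=__name__)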
+ + StreamingCommand configuration + ============================== + + You can configure your command for operation under Search Command Protocol (SCP) version 1 or 2. SCP 2 requires + Splunk 6.3 or later. + + """ + # region Methods + + def stream(self, records): + """ Generator function that processes and yields event records to the Splunk stream pipeline. + + You must override this method. + + """ + raise NotImplementedError('StreamingCommand.stream(self, records)') + + def _execute(self, ifile, process): + SearchCommand._execute(self, ifile, self.stream) + + # endregion + + class ConfigurationSettings(SearchCommand.ConfigurationSettings): + """ Represents the configuration settings that apply to a :class:`StreamingCommand`. + + """ + # region SCP v1/v2 properties + + required_fields = ConfigurationSetting(doc=''' + List of required fields for this search which back-propagates to the generating search. + + Setting this value enables selected fields mode under SCP 2. Under SCP 1 you must also specify + :code:`clear_required_fields=True` to enable selected fields mode. To explicitly select all fields, + specify a value of :const:`['*']`. No error is generated if a specified field is missing. + + Default: :const:`None`, which implicitly selects all fields. + + Supported by: SCP 1, SCP 2 + + ''') + + # endregion + + # region SCP v1 properties + + clear_required_fields = ConfigurationSetting(doc=''' + :const:`True`, if required_fields represent the *only* fields required. + + If :const:`False`, required_fields are additive to any fields that may be required by subsequent commands. + In most cases, :const:`False` is appropriate for streaming commands. + + Default: :const:`False` + + Supported by: SCP 1 + + ''') + + local = ConfigurationSetting(doc=''' + :const:`True`, if the command should run locally on the search head. + + Default: :const:`False` + + Supported by: SCP 1 + + ''') + + overrides_timeorder = ConfigurationSetting(doc=''' + :const:`True`, if the command changes the order of events with respect to time. + + Default: :const:`False` + + Supported by: SCP 1 + + ''') + + streaming = ConfigurationSetting(readonly=True, value=True, doc=''' + Specifies that the command is streamable. + + Fixed: :const:`True` + + Supported by: SCP 1 + + ''') + + # endregion + + # region SCP v2 Properties + + distributed = ConfigurationSetting(value=True, doc=''' + :const:`True`, if this command should be distributed to indexers. + + Under SCP 1 you must either specify `local = False` or include this line in commands.conf_, if this command + should be distributed to indexers. + + ..code: + local = true + + Default: :const:`True` + + Supported by: SCP 2 + + .. commands.conf_: http://docs.splunk.com/Documentation/Splunk/latest/Admin/Commandsconf + + ''') + + maxinputs = ConfigurationSetting(doc=''' + Specifies the maximum number of events that can be passed to the command for each invocation. + + This limit cannot exceed the value of `maxresultrows` in limits.conf. Under SCP 1 you must specify this + value in commands.conf_. + + Default: The value of `maxresultrows`. + + Supported by: SCP 2 + + ''') + + type = ConfigurationSetting(readonly=True, value='streaming', doc=''' + Command type name. + + Fixed: :const:`'streaming'` + + Supported by: SCP 2 + + ''') + + # endregion + + # region Methods + + @classmethod + def fix_up(cls, command): + """ Verifies :code:`command` class structure. 
+ + """ + if command.stream == StreamingCommand.stream: + raise AttributeError('No StreamingCommand.stream override') + return + + # TODO: Stop looking like a dictionary because we don't obey the semantics + # N.B.: Does not use Python 2 dict copy semantics + def iteritems(self): + iteritems = SearchCommand.ConfigurationSettings.iteritems(self) + version = self.command.protocol_version + if version == 1: + if self.required_fields is None: + iteritems = ifilter(lambda name_value: name_value[0] != 'clear_required_fields', iteritems) + else: + iteritems = ifilter(lambda name_value2: name_value2[0] != 'distributed', iteritems) + if not self.distributed: + iteritems = imap( + lambda name_value1: (name_value1[0], 'stateful') if name_value1[0] == 'type' else (name_value1[0], name_value1[1]), iteritems) + return iteritems + + # N.B.: Does not use Python 3 dict view semantics + if not six.PY2: + items = iteritems + + # endregion diff --git a/bin/splunklib/searchcommands/validators.py b/bin/splunklib/searchcommands/validators.py new file mode 100644 index 0000000..0278fbd --- /dev/null +++ b/bin/splunklib/searchcommands/validators.py @@ -0,0 +1,394 @@ +# coding=utf-8 +# +# Copyright 2011-2015 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import, division, print_function, unicode_literals + +from json.encoder import encode_basestring_ascii as json_encode_string +from collections import namedtuple +from splunklib.six.moves import StringIO +from io import open +import csv +import os +import re +from splunklib import six +from splunklib.six.moves import getcwd + + +class Validator(object): + """ Base class for validators that check and format search command options. + + You must inherit from this class and override :code:`Validator.__call__` and + :code:`Validator.format`. :code:`Validator.__call__` should convert the + value it receives as argument and then return it or raise a + :code:`ValueError`, if the value will not convert. + + :code:`Validator.format` should return a human readable version of the value + it receives as argument the same way :code:`str` does. + + """ + def __call__(self, value): + raise NotImplementedError() + + def format(self, value): + raise NotImplementedError() + + +class Boolean(Validator): + """ Validates Boolean option values. + + """ + truth_values = { + '1': True, '0': False, + 't': True, 'f': False, + 'true': True, 'false': False, + 'y': True, 'n': False, + 'yes': True, 'no': False + } + + def __call__(self, value): + if not (value is None or isinstance(value, bool)): + value = six.text_type(value).lower() + if value not in Boolean.truth_values: + raise ValueError('Unrecognized truth value: {0}'.format(value)) + value = Boolean.truth_values[value] + return value + + def format(self, value): + return None if value is None else 't' if value else 'f' + + +class Code(Validator): + """ Validates code option values. 
+ + This validator compiles an option value into a Python code object that can be executed by :func:`exec` or evaluated + by :func:`eval`. The value returned is a :func:`namedtuple` with two members: object, the result of compilation, and + source, the original option value. + + """ + def __init__(self, mode='eval'): + """ + :param mode: Specifies what kind of code must be compiled; it can be :const:`'exec'`, if source consists of a + sequence of statements, :const:`'eval'`, if it consists of a single expression, or :const:`'single'` if it + consists of a single interactive statement. In the latter case, expression statements that evaluate to + something other than :const:`None` will be printed. + :type mode: unicode or bytes + + """ + self._mode = mode + + def __call__(self, value): + if value is None: + return None + try: + return Code.object(compile(value, 'string', self._mode), six.text_type(value)) + except (SyntaxError, TypeError) as error: + if six.PY2: + message = error.message + else: + message = str(error) + + six.raise_from(ValueError(message), error) + + def format(self, value): + return None if value is None else value.source + + object = namedtuple('Code', ('object', 'source')) + + +class Fieldname(Validator): + """ Validates field name option values. + + """ + pattern = re.compile(r'''[_.a-zA-Z-][_.a-zA-Z0-9-]*$''') + + def __call__(self, value): + if value is not None: + value = six.text_type(value) + if Fieldname.pattern.match(value) is None: + raise ValueError('Illegal characters in fieldname: {}'.format(value)) + return value + + def format(self, value): + return value + + +class File(Validator): + """ Validates file option values. + + """ + def __init__(self, mode='rt', buffering=None, directory=None): + self.mode = mode + self.buffering = buffering + self.directory = File._var_run_splunk if directory is None else directory + + def __call__(self, value): + + if value is None: + return value + + path = six.text_type(value) + + if not os.path.isabs(path): + path = os.path.join(self.directory, path) + + try: + value = open(path, self.mode) if self.buffering is None else open(path, self.mode, self.buffering) + except IOError as error: + raise ValueError('Cannot open {0} with mode={1} and buffering={2}: {3}'.format( + value, self.mode, self.buffering, error)) + + return value + + def format(self, value): + return None if value is None else value.name + + _var_run_splunk = os.path.join( + os.environ['SPLUNK_HOME'] if 'SPLUNK_HOME' in os.environ else getcwd(), 'var', 'run', 'splunk') + + +class Integer(Validator): + """ Validates integer option values. 
+ + """ + def __init__(self, minimum=None, maximum=None): + if minimum is not None and maximum is not None: + def check_range(value): + if not (minimum <= value <= maximum): + raise ValueError('Expected integer in the range [{0},{1}], not {2}'.format(minimum, maximum, value)) + return + elif minimum is not None: + def check_range(value): + if value < minimum: + raise ValueError('Expected integer in the range [{0},+∞], not {1}'.format(minimum, value)) + return + elif maximum is not None: + def check_range(value): + if value > maximum: + raise ValueError('Expected integer in the range [-∞,{0}], not {1}'.format(maximum, value)) + return + else: + def check_range(value): + return + + self.check_range = check_range + return + + def __call__(self, value): + if value is None: + return None + try: + if six.PY2: + value = long(value) + else: + value = int(value) + except ValueError: + raise ValueError('Expected integer value, not {}'.format(json_encode_string(value))) + + self.check_range(value) + return value + + def format(self, value): + return None if value is None else six.text_type(int(value)) + + +class Duration(Validator): + """ Validates duration option values. + + """ + def __call__(self, value): + + if value is None: + return None + + p = value.split(':', 2) + result = None + _60 = Duration._60 + _unsigned = Duration._unsigned + + try: + if len(p) == 1: + result = _unsigned(p[0]) + if len(p) == 2: + result = 60 * _unsigned(p[0]) + _60(p[1]) + if len(p) == 3: + result = 3600 * _unsigned(p[0]) + 60 * _60(p[1]) + _60(p[2]) + except ValueError: + raise ValueError('Invalid duration value: {0}'.format(value)) + + return result + + def format(self, value): + + if value is None: + return None + + value = int(value) + + s = value % 60 + m = value // 60 % 60 + h = value // (60 * 60) + + return '{0:02d}:{1:02d}:{2:02d}'.format(h, m, s) + + _60 = Integer(0, 59) + _unsigned = Integer(0) + + +class List(Validator): + """ Validates a list of strings + + """ + class Dialect(csv.Dialect): + """ Describes the properties of list option values. """ + strict = True + delimiter = str(',') + quotechar = str('"') + doublequote = True + lineterminator = str('\n') + skipinitialspace = True + quoting = csv.QUOTE_MINIMAL + + def __init__(self, validator=None): + if not (validator is None or isinstance(validator, Validator)): + raise ValueError('Expected a Validator instance or None for validator, not {}', repr(validator)) + self._validator = validator + + def __call__(self, value): + + if value is None or isinstance(value, list): + return value + + try: + value = next(csv.reader([value], self.Dialect)) + except csv.Error as error: + raise ValueError(error) + + if self._validator is None: + return value + + try: + for index, item in enumerate(value): + value[index] = self._validator(item) + except ValueError as error: + raise ValueError('Could not convert item {}: {}'.format(index, error)) + + return value + + def format(self, value): + output = StringIO() + writer = csv.writer(output, List.Dialect) + writer.writerow(value) + value = output.getvalue() + return value[:-1] + + +class Map(Validator): + """ Validates map option values. 
+ + """ + def __init__(self, **kwargs): + self.membership = kwargs + + def __call__(self, value): + + if value is None: + return None + + value = six.text_type(value) + + if value not in self.membership: + raise ValueError('Unrecognized value: {0}'.format(value)) + + return self.membership[value] + + def format(self, value): + return None if value is None else list(self.membership.keys())[list(self.membership.values()).index(value)] + + +class Match(Validator): + """ Validates that a value matches a regular expression pattern. + + """ + def __init__(self, name, pattern, flags=0): + self.name = six.text_type(name) + self.pattern = re.compile(pattern, flags) + + def __call__(self, value): + if value is None: + return None + value = six.text_type(value) + if self.pattern.match(value) is None: + raise ValueError('Expected {}, not {}'.format(self.name, json_encode_string(value))) + return value + + def format(self, value): + return None if value is None else six.text_type(value) + + +class OptionName(Validator): + """ Validates option names. + + """ + pattern = re.compile(r'''(?=\w)[^\d]\w*$''', re.UNICODE) + + def __call__(self, value): + if value is not None: + value = six.text_type(value) + if OptionName.pattern.match(value) is None: + raise ValueError('Illegal characters in option name: {}'.format(value)) + return value + + def format(self, value): + return None if value is None else six.text_type(value) + + +class RegularExpression(Validator): + """ Validates regular expression option values. + + """ + def __call__(self, value): + if value is None: + return None + try: + value = re.compile(six.text_type(value)) + except re.error as error: + raise ValueError('{}: {}'.format(six.text_type(error).capitalize(), value)) + return value + + def format(self, value): + return None if value is None else value.pattern + + +class Set(Validator): + """ Validates set option values. + + """ + def __init__(self, *args): + self.membership = set(args) + + def __call__(self, value): + if value is None: + return None + value = six.text_type(value) + if value not in self.membership: + raise ValueError('Unrecognized value: {}'.format(value)) + return value + + def format(self, value): + return self.__call__(value) + + +__all__ = ['Boolean', 'Code', 'Duration', 'File', 'Integer', 'List', 'Map', 'RegularExpression', 'Set'] diff --git a/bin/splunklib/six.py b/bin/splunklib/six.py new file mode 100644 index 0000000..5fe9f8e --- /dev/null +++ b/bin/splunklib/six.py @@ -0,0 +1,980 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.14.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. 
It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", 
"htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class 
Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over 
the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. 
Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer)
diff --git a/default/app.conf b/default/app.conf new file mode 100755 index 0000000..44356b5 --- /dev/null +++ b/default/app.conf @@ -0,0 +1,12 @@ +[install] +is_configured = 0 + +[ui] +is_visible = 0 +label = TA-notableeditor + +[launcher] +author = Daniel Glauche +description = The app provides a custom search command to mass edit notable events +version = 1.0.0 +
diff --git a/default/commands.conf b/default/commands.conf new file mode 100755 index 0000000..896fcc8 --- /dev/null +++ b/default/commands.conf @@ -0,0 +1,7 @@ +[editnotables] +chunked = true +filename = notableeditor.py +python.version = python3 +enableheader = true +passauth = true +is_risky = true \ No newline at end of file
diff --git a/default/searchbnf.conf b/default/searchbnf.conf new file mode 100755 index 0000000..d9252e2 --- /dev/null +++ b/default/searchbnf.conf @@ -0,0 +1,9 @@ +[editnotables-command] +syntax = editnotables (comment=) (status=['new'|'in progress'|'pending'|'resolved'|'closed']) (urgency=['critical'|'high'|'medium'|'low'|'informational']) (newOwner=) +shortdesc = Mass edits notables +description = Takes notable events and edits them using Splunk's REST API +example1 = `notables` | head 10 | editnotables status="closed" +comment1 = Takes the 10 most recent notables and closes them. +maintainer = daniel.glauche@sva.de +usage = public +tags = notables \ No newline at end of file
diff --git a/local/app.conf b/local/app.conf new file mode 100755 index 0000000..5a44e3f --- /dev/null +++ b/local/app.conf @@ -0,0 +1,4 @@ + +[ui] + +[launcher]
diff --git a/metadata/default.meta b/metadata/default.meta new file mode 100755 index 0000000..4c828a7 --- /dev/null +++ b/metadata/default.meta @@ -0,0 +1,3 @@ +[] +access = read : [ * ], write : [ admin, power ] +export = system \ No newline at end of file
diff --git a/metadata/local.meta b/metadata/local.meta new file mode 100755 index 0000000..b46d961 --- /dev/null +++ b/metadata/local.meta @@ -0,0 +1,13 @@ +[app/ui] +version = 8.2.0 +modtime = 1627514766.276218900 + +[app/launcher] +version = 8.2.0 +modtime = 1627514766.286227900 + +[] +access = read : [ * ], write : [ admin, power ] +export = system +version = 8.2.0 +modtime = 1627515880.715027400
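
For reference, the validators added above in bin/splunklib/searchcommands/validators.py are what a command's Option declarations use to convert and check option values before the command logic ever sees them. A minimal sketch of their standalone behaviour, assuming the app's bin/ directory is on the Python path (the sample values are illustrative):

```python
from splunklib.searchcommands import validators

# Boolean() maps the usual truth strings (1/0, t/f, true/false, y/n, yes/no) to bool
boolean = validators.Boolean()
assert boolean('Yes') is True
assert boolean.format(False) == 'f'

# Integer(minimum, maximum) converts the string and range-checks the result
integer = validators.Integer(minimum=1, maximum=50000)
assert integer('100') == 100
try:
    integer('0')            # outside [1, 50000], so a ValueError is raised
except ValueError as error:
    print(error)

# Duration() parses [[HH:]MM:]SS strings into seconds and formats them back
duration = validators.Duration()
assert duration('01:30:00') == 5400
assert duration.format(5400) == '01:30:00'
```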