django-rq-3.1/0000775000175000017500000000000015043301563013121 5ustar carstencarstendjango-rq-3.1/demo-django-rq-cli-dashboard.gif0000664000175000017500000007552015043301563021117 0ustar carstencarsten[binary GIF image data omitted: animated demo of the django-rq CLI dashboard]
django-rq-3.1/pyproject.toml
[start of pyproject.toml garbled in the archive; the dependency line below is reconstructed from its readable tail]
dependencies = ["django>=3.2", "rq>=2", "redis>=3.5"]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Web Environment",
    "Framework :: Django",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: Information Technology",
    "Intended Audience :: Science/Research",
    "Intended Audience :: System Administrators",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Internet",
    "Topic :: Internet :: WWW/HTTP",
    "Topic :: Scientific/Engineering",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: System :: Distributed Computing",
    "Topic :: System :: Monitoring",
    "Topic :: System :: Systems Administration",
]

[project.optional-dependencies]
prometheus = ["prometheus_client >= 0.4.0"]
Sentry = ["sentry-sdk >= 1.0.0"]
testing = []

[project.urls]
changelog = "https://github.com/rq/django-rq/blob/master/CHANGELOG.md"
homepage = "https://python-rq.org/patterns/django/"
repository = "https://github.com/rq/django-rq"

[tool.hatch.build.targets.sdist]
include = ["/django_rq", "/CHANGELOG.md", "/pyproject.toml"]

[tool.mypy]
allow_redefinition = true
check_untyped_defs = true
pretty = true
show_error_codes = true
show_error_context = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unreachable = true

[[tool.mypy.overrides]]
module = "django_redis.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "prometheus_client.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "redis_cache.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "rq_scheduler.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "sentry_sdk.*"
ignore_missing_imports = true
django-rq-3.1/integration_test/0000775000175000017500000000000015043301563016503 5ustar carstencarstendjango-rq-3.1/integration_test/integration_app/0000775000175000017500000000000015043301563021666 5ustar carstencarstendjango-rq-3.1/integration_test/integration_app/apps.py0000664000175000017500000000022115043301563023176 0ustar carstencarstenfrom __future__ import unicode_literals

from django.apps import AppConfig


class IntegrationAppConfig(AppConfig):
    name = 'integration_app'
django-rq-3.1/integration_test/integration_app/admin.py0000664000175000017500000000007715043301563023334 0ustar carstencarstenfrom django.contrib import admin

# Register your models here.
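# A minimal sketch (not part of the shipped test app): if the integration
# app's MyModel (defined in .models below) were to be exposed in the admin,
# the registration would look roughly like this:
#
#     from .models import MyModel
#
#     admin.site.register(MyModel)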
django-rq-3.1/integration_test/integration_app/__init__.py0000664000175000017500000000000015043301563023765 0ustar carstencarstendjango-rq-3.1/integration_test/integration_app/views.py0000664000175000017500000000065115043301563023377 0ustar carstencarstenfrom django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

from .models import *

import django_rq


@csrf_exempt
def home(request):
    if request.method == 'POST':
        django_rq.enqueue(add_mymodel, request.POST["name"])
        return HttpResponse("Enqueued")
    names = [m.name for m in MyModel.objects.order_by("name")]
    return HttpResponse("Entries: {}".format(",".join(names)))
django-rq-3.1/integration_test/integration_app/tests.py0000664000175000017500000000007415043301563023403 0ustar carstencarstenfrom django.test import TestCase

# Create your tests here.
django-rq-3.1/integration_test/integration_app/migrations/0000775000175000017500000000000015043301563024042 5ustar carstencarstendjango-rq-3.1/integration_test/integration_app/migrations/__init__.py0000664000175000017500000000000015043301563026141 0ustar carstencarstendjango-rq-3.1/integration_test/integration_app/migrations/0001_initial.py0000664000175000017500000000104515043301563026505 0ustar carstencarsten# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-09 15:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='MyModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField(unique=True)),
            ],
        ),
    ]
django-rq-3.1/integration_test/integration_app/models.py0000664000175000017500000000043615043301563023526 0ustar carstencarstenfrom django.db import models


class MyModel(models.Model):
    name = models.TextField(unique=True)


def add_mymodel(name):
    m = MyModel(name=name)
    m.save()


# causes a DB connection at import-time
# see IntegrationTest.test_worker_lost_connection
list(MyModel.objects.all())
django-rq-3.1/integration_test/_tests.py0000664000175000017500000001023515043301563020357 0ustar carstencarsten# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

import logging
import os
import signal
import subprocess
import sys
import time
import unittest
from urllib.parse import urlunsplit

import psycopg2
import requests
from django.conf import settings

DJANGO_SETTINGS_MODULE = "integration_test.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", DJANGO_SETTINGS_MODULE)

logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)


class Process(object):
    @staticmethod
    def _command(args):
        return list(args)

    @classmethod
    def run(cls, *args):
        subprocess.check_call(cls._command(args))

    def __init__(self, *args):
        self.args = list(args)

    def start(self):
        self.process = subprocess.Popen(self._command(self.args), preexec_fn=os.setsid)
        logger.info("START PROCESS args:{} pid:{}".format(self.args, self.process.pid))
        time.sleep(1)

    def stop(self):
        # to be sure we kill all the children:
        os.killpg(self.process.pid, signal.SIGTERM)

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *args):
        self.stop()


class DjangoCommand(Process):
    @staticmethod
    def _command(args):
        return ["./manage.py"] + list(args) + ["--settings", DJANGO_SETTINGS_MODULE]


def terminate_all_postgres_connections(profile="default"):
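    """Terminate every PostgreSQL backend connected to the configured database.

    Connects to the ``template1`` maintenance database and runs
    ``pg_terminate_backend()`` against all sessions on the target database,
    so the tests can simulate a dropped DB connection.
    """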
    db_settings = settings.DATABASES[profile]
    conn_params = {
        'database': 'template1',
        'user': db_settings["USER"],
        'password': db_settings["PASSWORD"],
        'host': db_settings["HOST"],
        'port': db_settings["PORT"],
    }
    with psycopg2.connect(**conn_params) as conn:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT pg_terminate_backend(pg_stat_activity.pid)
            FROM pg_stat_activity
            WHERE pg_stat_activity.datname = %s
        """, (db_settings["NAME"], ))


class IntegrationTest(unittest.TestCase):
    ADDRPORT = "127.0.0.1:8000"
    HOME_URL = urlunsplit(("http", ADDRPORT, "/", "", ""))

    def setUp(self):
        DjangoCommand.run("flush", "--noinput")
        # self.site = DjangoCommand("runserver", self.ADDRPORT)
        self.site = Process(
            "gunicorn",
            "-b", self.ADDRPORT,
            "--timeout", "600",  # useful for worker debugging
            "integration_test.wsgi:application")
        self.site.start()

    def tearDown(self):
        self.site.stop()

    def assertFailure(self):
        r = requests.get(self.HOME_URL)
        self.assertEqual(r.status_code, 500)

    def assertEntries(self, expected):
        r = requests.get(self.HOME_URL)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.text, "Entries: {}".format(",".join(expected)))

    def enqueue(self, name):
        r = requests.post(self.HOME_URL, {"name": name})
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.text, "Enqueued")

    def test_db_is_empty(self):
        self.assertEntries([])

    def test_burst(self):
        self.enqueue("first")
        DjangoCommand.run("rqworker", "--burst")
        self.assertEntries(["first"])

    def test_site_fails_and_then_reconnects(self):
        self.enqueue("first")
        DjangoCommand.run("rqworker", "--burst")
        terminate_all_postgres_connections()
        # the DB connection is gone, so the worker must first detect the problem:
        self.assertFailure()
        # now the gunicorn worker is ok again:
        self.assertEntries(["first"])

    def test_worker_lost_connection(self):
        with DjangoCommand("rqworker") as worker:
            self.enqueue("first")
            time.sleep(2)  # wait for the worker to do the job
            self.assertEntries(["first"])  # job is done
            terminate_all_postgres_connections()
            self.enqueue("second")
            time.sleep(2)  # wait for the worker to do the job
            self.assertFailure()  # let the gunicorn worker reconnect
            self.assertEntries(["first", "second"])  # work is done


if __name__ == '__main__':
    unittest.main()
django-rq-3.1/integration_test/README.md0000664000175000017500000000102515043301563017760 0ustar carstencarstenA sample project to test rqworker and site interaction

## Prerequisites

Install PostgreSQL

    sudo apt-get install postgresql

Create user and database

    sudo -u postgres psql
    # drop database djangorqdb;
    # drop user djangorqusr;
    # create user djangorqusr with createrole superuser password 'djangorqusr';
    # create database djangorqdb owner djangorqusr;

Init database schema

    ./manage.py migrate

Install required packages:

    pip install -r requirements.txt

## Test

To run tests:

    python _tests.py
django-rq-3.1/integration_test/integration_test/0000775000175000017500000000000015043301563022065 5ustar carstencarstendjango-rq-3.1/integration_test/integration_test/__init__.py0000664000175000017500000000000015043301563024164 0ustar carstencarstendjango-rq-3.1/integration_test/integration_test/settings.py0000664000175000017500000000657015043301563024307 0ustar carstencarsten"""
Django settings for integration_test project.

Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '!s1kl4g@+13igo3-&47f4+5-zfj!3j&n*sw$32@m%d65*muwni' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django_rq', 'integration_app', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'integration_test.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'integration_test.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'djangorqdb', 'USER': 'djangorqusr', 'PASSWORD': 'djangorqusr', 'HOST': 'localhost', 'PORT': '5432', 'CONN_MAX_AGE': 10 * 60, 'ATOMIC_REQUESTS': True, }, } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' RQ_QUEUES = { 'default': { 'HOST': 'localhost', 'PORT': 6379, 'DB': 0, } } django-rq-3.1/integration_test/integration_test/urls.py0000664000175000017500000000151115043301563023422 0ustar carstencarsten"""integration_test URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. 
Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.urls import path from django.contrib import admin from integration_app import views urlpatterns = [ path('', views.home, name='home'), path('admin/', admin.site.urls), ] django-rq-3.1/integration_test/integration_test/wsgi.py0000664000175000017500000000063215043301563023411 0ustar carstencarsten""" WSGI config for integration_test project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "integration_test.settings") application = get_wsgi_application() django-rq-3.1/integration_test/requirements.txt0000664000175000017500000000010715043301563021765 0ustar carstencarsten-e .. Django==4.2.22 gunicorn==23.0.0 psycopg2==2.9.7 requests==2.32.4 django-rq-3.1/integration_test/manage.py0000775000175000017500000000145615043301563020316 0ustar carstencarsten#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "integration_test.settings") try: from django.core.management import execute_from_command_line except ImportError: # The above import may fail for some other reason. Ensure that the # issue is really that Django is missing to avoid masking other # exceptions on Python 2. try: import django except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) raise execute_from_command_line(sys.argv) django-rq-3.1/Makefile0000664000175000017500000000025615043301563014564 0ustar carstencarstentest: coverage run `which django-admin.py` test django_rq --settings=django_rq.tests.settings --pythonpath=. python setup.py check --metadata --restructuredtext --strict django-rq-3.1/SECURITY.md0000664000175000017500000000030115043301563014704 0ustar carstencarsten## Security contact information To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. 
django-rq-3.1/demo-django-rq-json-dashboard.png0000664000175000017500000014240315043301563021333 0ustar carstencarsten[binary PNG image data omitted: screenshot of the django-rq JSON stats dashboard]
django-rq-3.1/CHANGELOG.md0000664000175000017500000002362315043301563014740 0ustar carstencarsten### Version 3.1 (2025-08-02)
* Added `/metrics` endpoint to expose RQ metrics in Prometheus format. Thanks @terencehonles!
* Added token-based authentication for the `/metrics` and `/stats.json` endpoints. Thanks @RasmusThing!
* Migrated Django-RQ to use Python's modern packaging standard. Thanks @terencehonles!
* Fixed an issue with setting `result_ttl` via job decorators. Thanks @Sir-heed!
* Various admin page improvements. Thanks @selwin and @DhavalGojiya!

### Version 3.0.1 (2025-04-28)
* Temporarily disabled the `object-tools` block in the job detail view. Thanks @terencehonles!
* Fixed a typo in `setup.py`. Thanks @terencehonles!
* Added support for RQ >= 2.2. Thanks @terencehonles!
* Properly handle cases where no `HOST` is specified in the Redis config. Thanks @spapas!

### Version 3.0 (2024-10-28)
* Added support for RQ 2.0. Thanks @selwin!
* Many typing improvements. Thanks @SpecLad and @terencehonles!
* Added a management command to suspend and resume workers. Thanks @jackkinsella!
* Better support for Redis Sentinel. Thanks @alaouimehdi1995!

### Version 2.10.2 (2024-03-23)
* Added support for Django 5.0. Thanks @selwin!
* Fixed an error in Python 3.12. Thanks @selwin!
* Fixed an issue that may happen if the Redis port is not specified. Thanks @terencehonles!
### Version 2.10.1 (2023-12-18)
* Fixed packaging issues with 2.10.0.

### Version 2.10.0 (2023-12-18)
* Added `rqworker-pool` management command. Thanks @chromium7!
* Compatibility with Django 5.0. Thanks @perry!
* The scheduler now defaults to db 0. Thanks @bennylope!

### Version 2.9.0 (2023-11-26)
* Added an option to delete all failed jobs. Thanks @chromium7!
* You can now specify the `SERIALIZER` option while declaring queues in `settings.py`. Thanks @sophcass!
* Updated templates to match newer versions of Django admin's styling. Thanks @nikhilweee!
* Don't show the `Empty Queue` button on registry pages. Thanks @selwin!

### Version 2.8.1 (2023-05-14)
* Added a button to stop currently running jobs. Thanks @gabriels1234!
* Added a failed jobs column to the `rqstats` command. Thanks @dangquangdon!
* Explicitly requires RQ >= 1.14 in `setup.py`. Thanks @selwin!

### Version 2.8.0 (2023-05-02)
* Support for RQ 1.14. Thanks @Cerebro92 and @selwin!
* Show scheduler PID information in the admin interface. Thanks @gabriels1234!
* Added `serializer` argument to the `rqworker` command. Thanks @gabriels1234!
* Added `USERNAME` and `SENTINEL_KWARGS` support. Thanks @joachimBurket!

### Version 2.7.0 (2023-02-07)
* Able to show multiple execution results for each job (requires RQ v1.12). Thanks @selwin!
* Various admin interface improvements. Thanks @selwin!

### Version 2.6.0 (2022-11-05)
* Added `--max-jobs` argument to the `rqworker` management command. Thanks @arpit-goel!
* Removed jobs from `ScheduledJobRegistry` when a scheduled job is enqueued from the admin. Thanks @robertaistleitner!
* Minor code cleanup. Thanks @reybog90!

### Version 2.5.1 (2021-11-22)
* `Redis.from_url` does not accept the `ssl_cert_reqs` argument for non-SSL Redis URLs. Thanks @barash-asenov!

### Version 2.5.0 (2021-11-17)
* Better integration with Django admin, along with a new `Access admin page` permission that you can selectively grant to users. Thanks @haakenlid!
* Worker count is now updated every time you view workers for that specific queue. Thanks @cgl!
* Added the capability to pass arbitrary Redis client kwargs. Thanks @juanjgarcia!
* Always escape text when rendering job arguments. Thanks @rhenanbartels!
* Added the `@never_cache` decorator to all Django-RQ views. Thanks @Cybernisk!
* The `SSL_CERT_REQS` argument is now also passed to the Redis client even when a Redis URL is used. Thanks @paltman!

### Version 2.4.1 (2021-03-31)
* Added `ssl_cert_reqs` and `username` to the queue config. Thanks @jeyang!

### Version 2.4.0 (2020-11-08)
* Various admin interface improvements. Thanks @selwin and @atten!
* Improved Sentry integration. Thanks @hugorodgerbrown and @kichawa!

### Version 2.3.2 (2020-05-13)
* Compatibility with RQ >= 1.4.0, which implements a customizable serialization method. Thanks @selwin!

### Version 2.3.1 (2020-04-10)
* Added `--with-scheduler` argument to the `rqworker` management command. Thanks @stlk!
* Fixed a bug where opening the job detail page would crash if `job.dependency` no longer exists. Thanks @selwin!

### Version 2.3.0 (2020-02-09)
* Support for RQ's new `ScheduledJobRegistry`. Thanks @Yolley!
* Improved performance when displaying pages showing a large number of jobs by using `Job.fetch_many()`. Thanks @selwin!
* `django-rq` will now automatically clean up orphaned worker keys in job registries. Thanks @selwin!
* Site name is now properly displayed in Django-RQ admin pages. Thanks @tom-price!
* `NoSuchJobError`s are now handled properly when requeuing all jobs. Thanks @thomasmatecki!
* Support for displaying jobs with names containing `$`. Thanks @gowthamk63!
### Version 2.2.0 (2019-12-08)
- Support for Django 3.0. This release also drops support for Django 1.X. Thanks @hugorodgerbrown!
- `rqworker` management command now properly passes in `--verbosity` to `Worker`. Thanks @stlk!
- The admin interface can now view jobs with `:` in their IDs. Thanks @carboncoop!
- Job detail page now shows `job.dependency`. Thanks @selwin!

### Version 2.1.0 (2019-06-14)
- Fixed `Requeue All`
- Django-RQ now automatically runs maintenance tasks when `rq_home` is opened

### Version 2.0 (2019-04-06)
- Compatibility with RQ 1.0 (Thanks @selwin). Backward incompatible changes include:
    * `FailedQueue` is now replaced by `FailedJobRegistry`
    * RQ now uses `sentry-sdk` to send job failures to Sentry.
- Scheduler now respects default `timeout` and `result_ttl` defined in `RQ_QUEUES`. Thanks @simone6021!
- Minor improvements and bug fixes. Thanks @selwin!

### Version 1.3.1 (2019-03-15)
- Run `rqworker` with `--sentry_dsn=""` to disable Sentry integration. Thanks @Bolayniuss!
- Support for `SSL` Redis kwarg. Thanks @ajknv!
- `rqworker` and `rqscheduler` management commands now use RQ's built-in `setup_loghandlers` function. Thanks @Paulius-Maruska!
- Removed the use of the deprecated `admin_static` template tag. Thanks @lorenzomorandini!

### Version 1.3.0 (2018-12-18)
- Added support for `redis-py` >= 3 and `RQ` >= 0.13. Thanks @selwin!
- Use `Worker.count(queue=queue)` to speed up the process of getting the number of active workers. Thanks @selwin!
- Added an option to requeue a job from the admin interface. Thanks @seiryuz!
- Improved Sentinel support. Thanks @pnuckowski!

### Version 1.2.0 (2018-07-26)
- Supports Python 3.7 by renaming `async` to `is_async`. Thanks @Flimm!
- `UnpickleError` is now handled properly. Thanks @selwin!
- Redis Sentinel support. Thanks @SpeedyCoder!

### Version 1.1.0
- Fixed some admin-related bugs. Thanks @seiryuz!
- More Django 2.0 compatibility fixes. Thanks @selwin and @koddr!
- Custom `Job` and `Worker` classes are now supported. Thanks @skirsdeda!
- `SENTRY_DSN` value in `settings.py` will now be used by default. Thanks @inetss!

### 1.0.1
- Django 2.0 compatibility fixes.
- Minor bug fixes

### 1.0.0
- You can now view worker information
- Detailed worker statistics such as failed/completed job count are now shown (requires RQ >= 0.9.0). Thanks @seiryuz!
- `rqstats` management command now allows you to monitor queue stats via CLI. Thanks @seiryuz!
- Added `/stats.json` endpoint to fetch RQ stats in JSON format, useful for monitoring purposes. Thanks @seiryuz!
- Fixed a crash when displaying deferred jobs. Thanks @Hovercross!
- Added `sentry-dsn` CLI option to the `rqworker` management command. Thanks @efi-mk!
- Improved performance when requeueing all jobs. Thanks @therefromhere!

### 0.9.6
- More Django 1.10 compatibility fixes. Thanks @dmwyatt!
- Improved performance when dealing with a large number of workers. Thanks @lucastamoios!

### 0.9.5
- Fixed view paging for registry-based job lists. Thanks @smaccona!
- Fixed an issue where multiple failed queues may appear for the same connection. Thanks @depaolim!
- `rqworker` management command now closes all DB connections before executing jobs. Thanks @depaolim!
- Fixed an argument parsing bug in the `rqworker` management command. Thanks @hendi!

### 0.9.3
- Added a `--pid` option to the `rqscheduler` management command. Thanks @vindemasi!
- Added `--queues` option to the `rqworker` management command. Thanks @gasket!
- Job results are now shown on the admin page. Thanks @mojeto!
- Fixed a bug in interpreting `--burst` argument in `rqworker` management command. Thanks @claudep! - Added Requeue All feature in Failed Queue's admin page. Thanks @lucashowell! - Admin interface now shows time in local timezone. Thanks @randomguy91! - Other minor fixes by @jeromer and @sbussetti. ### 0.9.2 - Support for Django 1.10. Thanks @jtburchfield! - Added `--queue-class` option to `rqworker` management command. Thanks @Krukov! ### 0.9.1 - Added `-i` and `--queue` options to rqscheduler management command. Thanks @mbodock and @sbussetti! - Added `--pid` option to `rqworker` management command. Thanks @ydaniv! - Admin interface fixes for Django 1.9. Thanks @philippbosch! - Compatibility fix for `django-redis-cache`. Thanks @scream4ik! - *Backwards incompatible*: Exception handlers are now defined via `RQ_EXCEPTION_HANDLERS` in `settings.py`. Thanks @sbussetti! - Queues in django-admin are now sorted by name. Thanks @pnuckowski! ### 0.9.0 - Support for Django 1.9. Thanks @aaugustin and @viaregio! - `rqworker` management command now accepts `--worker-ttl` argument. Thanks pnuckowski! - You can now easily specify custom `EXCEPTION_HANDLERS` in `settings.py`. Thanks @xuhcc! - `django-rq` now requires RQ >= 0.5.5 ### 0.8.0 - You can now view deferred, finished and currently active jobs from admin interface. - Better support for Django 1.8. Thanks @epicserve and @seiryuz! - Requires RQ >= 0.5. - You can now use StrictRedis with Django-RQ. Thanks @wastrachan! ### 0.7.0 - Added `rqenqueue` management command for easy scheduling of tasks (e.g via cron django-rq-3.1/LICENSE.txt0000664000175000017500000000203515043301563014744 0ustar carstencarstenCopyright (c) 2012 Selwin Ong Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.django-rq-3.1/django_rq/0000775000175000017500000000000015043301563015065 5ustar carstencarstendjango-rq-3.1/django_rq/apps.py0000664000175000017500000000022715043301563016403 0ustar carstencarstenfrom django.apps import AppConfig class DjangoRqAdminConfig(AppConfig): default_auto_field = "django.db.models.AutoField" name = "django_rq" django-rq-3.1/django_rq/admin.py0000664000175000017500000000271215043301563016531 0ustar carstencarstenfrom typing import Any, Dict, Optional from django.contrib import admin from django.http.request import HttpRequest from django.http.response import HttpResponse from . 
import settings, stats_views, models class QueueAdmin(admin.ModelAdmin): """Admin View for Django-RQ Queue""" def has_add_permission(self, request): return False # Hide the admin "+ Add" link for Queues def has_change_permission(self, request: HttpRequest, obj: Optional[Any] = None) -> bool: return True def has_module_permission(self, request: HttpRequest): """ return True if the given request has any permission in the given app label. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to view the module on the admin index page and access the module's index page. Overriding it does not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for that. """ return request.user.has_module_perms('django_rq') # type: ignore[union-attr] def changelist_view(self, request: HttpRequest, extra_context: Optional[Dict[str, Any]] = None) -> HttpResponse: """The 'change list' admin view for this model.""" # proxy request to stats view return stats_views.stats(request) if settings.SHOW_ADMIN_LINK: admin.site.register(models.Queue, QueueAdmin) django-rq-3.1/django_rq/__init__.py0000664000175000017500000000041615043301563017177 0ustar carstencarstenVERSION = (3, 1, 0) from .decorators import job from .queues import enqueue, get_connection, get_queue, get_scheduler from .workers import get_worker __all__ = [ "job", "enqueue", "get_connection", "get_queue", "get_scheduler", "get_worker", ] django-rq-3.1/django_rq/py.typed0000664000175000017500000000000015043301563016552 0ustar carstencarstendjango-rq-3.1/django_rq/settings.py0000664000175000017500000000200715043301563017276 0ustar carstencarstenfrom operator import itemgetter from typing import Any, cast, Dict, List, Optional from django.conf import settings from django.core.exceptions import ImproperlyConfigured from .queues import get_unique_connection_configs SHOW_ADMIN_LINK = getattr(settings, 'RQ_SHOW_ADMIN_LINK', False) QUEUES = cast(Dict[str, Any], getattr(settings, 'RQ_QUEUES', None)) if QUEUES is None: raise ImproperlyConfigured("You have to define RQ_QUEUES in settings.py") NAME = getattr(settings, 'RQ_NAME', 'default') BURST: bool = getattr(settings, 'RQ_BURST', False) # All queues in list format so we can get them by index, includes failed queues QUEUES_LIST = [] QUEUES_MAP = {} for key, value in sorted(QUEUES.items(), key=itemgetter(0)): QUEUES_LIST.append({'name': key, 'connection_config': value}) QUEUES_MAP[key] = len(QUEUES_LIST) - 1 # Get exception handlers EXCEPTION_HANDLERS: List[str] = getattr(settings, 'RQ_EXCEPTION_HANDLERS', []) # Token for querying statistics API_TOKEN: str = getattr(settings, 'RQ_API_TOKEN', '') django-rq-3.1/django_rq/views.py0000664000175000017500000005166015043301563016604 0ustar carstencarstenfrom math import ceil from typing import Any, cast, Tuple from django.contrib import admin, messages from django.contrib.admin.views.decorators import staff_member_required from django.http import Http404 from django.shortcuts import redirect, render from django.urls import reverse from django.views.decorators.cache import never_cache from django.views.decorators.http import require_POST from redis.exceptions import ResponseError from rq import requeue_job from rq.exceptions import NoSuchJobError from rq.job import Job, JobStatus from rq.registry import ( DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, ScheduledJobRegistry, StartedJobRegistry, ) from rq.worker import Worker from rq.worker_registration 
import clean_worker_registry

from .queues import get_queue_by_index, get_scheduler_by_index
from .settings import QUEUES_MAP
from .utils import get_executions, get_jobs, stop_jobs


@never_cache
@staff_member_required
def jobs(request, queue_index):
    queue_index = int(queue_index)
    queue = get_queue_by_index(queue_index)

    items_per_page = 100
    num_jobs = queue.count
    page = int(request.GET.get('page', 1))

    if num_jobs > 0:
        last_page = int(ceil(num_jobs / items_per_page))
        page_range = list(range(1, last_page + 1))
        offset = items_per_page * (page - 1)
        jobs = queue.get_jobs(offset, items_per_page)
    else:
        jobs = []
        page_range = []

    context_data = {
        **admin.site.each_context(request),
        'queue': queue,
        'queue_index': queue_index,
        'jobs': jobs,
        'num_jobs': num_jobs,
        'page': page,
        'page_range': page_range,
        'job_status': 'Queued',
    }
    return render(request, 'django_rq/jobs.html', context_data)


@never_cache
@staff_member_required
def finished_jobs(request, queue_index):
    queue_index = int(queue_index)
    queue = get_queue_by_index(queue_index)

    registry = FinishedJobRegistry(queue.name, queue.connection)

    items_per_page = 100
    num_jobs = len(registry)
    page = int(request.GET.get('page', 1))

    if request.GET.get('desc', '1') == '1':
        sort_direction = 'descending'
    else:
        sort_direction = 'ascending'

    jobs = []

    if num_jobs > 0:
        last_page = int(ceil(num_jobs / items_per_page))
        page_range = list(range(1, last_page + 1))
        offset = items_per_page * (page - 1)
        job_ids = registry.get_job_ids(offset, offset + items_per_page - 1, desc=sort_direction == 'descending')
        jobs = get_jobs(queue, job_ids, registry)

    else:
        page_range = []

    context_data = {
        **admin.site.each_context(request),
        'queue': queue,
        'queue_index': queue_index,
        'jobs': jobs,
        'num_jobs': num_jobs,
        'page': page,
        'page_range': page_range,
        'sort_direction': sort_direction,
    }
    return render(request, 'django_rq/finished_jobs.html', context_data)


@never_cache
@staff_member_required
def failed_jobs(request, queue_index):
    queue_index = int(queue_index)
    queue = get_queue_by_index(queue_index)

    registry = FailedJobRegistry(queue.name, queue.connection)

    items_per_page = 100
    num_jobs = len(registry)
    page = int(request.GET.get('page', 1))

    if request.GET.get('desc', '1') == '1':
        sort_direction = 'descending'
    else:
        sort_direction = 'ascending'

    jobs = []

    if num_jobs > 0:
        last_page = int(ceil(num_jobs / items_per_page))
        page_range = list(range(1, last_page + 1))
        offset = items_per_page * (page - 1)
        job_ids = registry.get_job_ids(offset, offset + items_per_page - 1, desc=sort_direction == 'descending')
        jobs = get_jobs(queue, job_ids, registry)

    else:
        page_range = []

    context_data = {
        **admin.site.each_context(request),
        'queue': queue,
        'queue_index': queue_index,
        'jobs': jobs,
        'num_jobs': num_jobs,
        'page': page,
        'page_range': page_range,
        'sort_direction': sort_direction,
    }
    return render(request, 'django_rq/failed_jobs.html', context_data)


@never_cache
@staff_member_required
def scheduled_jobs(request, queue_index):
    queue_index = int(queue_index)
    queue = get_queue_by_index(queue_index)

    registry = ScheduledJobRegistry(queue.name, queue.connection)

    items_per_page = 100
    num_jobs = len(registry)
    page = int(request.GET.get('page', 1))
    jobs = []

    if request.GET.get('desc', '1') == '1':
        sort_direction = 'descending'
    else:
        sort_direction = 'ascending'

    if num_jobs > 0:
        last_page = int(ceil(num_jobs / items_per_page))
        page_range = list(range(1, last_page + 1))
        offset = items_per_page * (page - 1)
        job_ids = registry.get_job_ids(offset, offset + items_per_page - 1, desc=sort_direction == 'descending')
        jobs = 
get_jobs(queue, job_ids, registry) for job in jobs: job.scheduled_at = registry.get_scheduled_time(job) # type: ignore[attr-defined] else: page_range = [] context_data = { **admin.site.each_context(request), 'queue': queue, 'queue_index': queue_index, 'jobs': jobs, 'num_jobs': num_jobs, 'page': page, 'page_range': page_range, 'sort_direction': sort_direction, } return render(request, 'django_rq/scheduled_jobs.html', context_data) @never_cache @staff_member_required def started_jobs(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) registry = StartedJobRegistry(queue.name, queue.connection) items_per_page = 100 num_jobs = len(registry) page = int(request.GET.get('page', 1)) jobs = [] executions = [] if num_jobs > 0: last_page = int(ceil(num_jobs / items_per_page)) page_range = list(range(1, last_page + 1)) offset = items_per_page * (page - 1) try: composite_keys = registry.get_job_and_execution_ids(offset, offset + items_per_page - 1) except AttributeError: composite_keys = [ cast(Tuple[str, str], key.split(':')) for key in registry.get_job_ids(offset, offset + items_per_page - 1) ] jobs = get_jobs(queue, [i[0] for i in composite_keys], registry) executions = get_executions(queue, composite_keys) else: page_range = [] context_data = { **admin.site.each_context(request), 'queue': queue, 'queue_index': queue_index, 'jobs': jobs, 'num_jobs': num_jobs, 'page': page, 'page_range': page_range, 'job_status': 'Started', 'executions': executions, } return render(request, 'django_rq/started_job_registry.html', context_data) @never_cache @staff_member_required def workers(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) clean_worker_registry(queue) all_workers = Worker.all(queue.connection) workers = [worker for worker in all_workers if queue.name in worker.queue_names()] context_data = { **admin.site.each_context(request), 'queue': queue, 'queue_index': queue_index, 'workers': workers, } return render(request, 'django_rq/workers.html', context_data) @never_cache @staff_member_required def worker_details(request, queue_index, key): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) worker = Worker.find_by_key(key, connection=queue.connection) assert worker # Convert microseconds to milliseconds worker.total_working_time = worker.total_working_time / 1000 queue_names = ', '.join(worker.queue_names()) context_data = { **admin.site.each_context(request), 'queue': queue, 'queue_index': queue_index, 'worker': worker, 'queue_names': queue_names, 'job': worker.get_current_job(), 'total_working_time': worker.total_working_time * 1000, } return render(request, 'django_rq/worker_details.html', context_data) @never_cache @staff_member_required def deferred_jobs(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) registry = DeferredJobRegistry(queue.name, queue.connection) items_per_page = 100 num_jobs = len(registry) page = int(request.GET.get('page', 1)) jobs = [] if request.GET.get('desc', '1') == '1': sort_direction = 'descending' else: sort_direction = 'ascending' if num_jobs > 0: last_page = int(ceil(num_jobs / items_per_page)) page_range = list(range(1, last_page + 1)) offset = items_per_page * (page - 1) job_ids = registry.get_job_ids(offset, offset + items_per_page - 1, desc=sort_direction == 'descending') for job_id in job_ids: try: jobs.append(Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer)) except NoSuchJobError: pass else: 
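# NOTE: unlike the registry views above, this branch fetches jobs one by one
# and silently skips ids whose payload has expired, without pruning them from
# the registry. A minimal bulk sketch in the spirit of the get_jobs() helper
# in django_rq/utils.py (illustrative only; fetch_valid_jobs is a hypothetical
# name, not part of django-rq):
#
#     from rq.job import Job
#
#     def fetch_valid_jobs(queue, job_ids):
#         """Fetch jobs in one round-trip; drop ids missing from Redis."""
#         jobs = Job.fetch_many(job_ids, connection=queue.connection,
#                               serializer=queue.serializer)
#         return [job for job in jobs if job is not None]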
page_range = [] context_data = { **admin.site.each_context(request), 'queue': queue, 'queue_index': queue_index, 'jobs': jobs, 'num_jobs': num_jobs, 'page': page, 'page_range': page_range, 'job_status': 'Deferred', 'sort_direction': sort_direction, } return render(request, 'django_rq/deferred_jobs.html', context_data) @never_cache @staff_member_required def job_detail(request, queue_index, job_id): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) try: job = Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer) except NoSuchJobError: raise Http404("Couldn't find job with this ID: %s" % job_id) try: job.func_name data_is_valid = True except: data_is_valid = False # Backward compatibility support for RQ < 1.12.0 rv = job.connection.hget(job.key, 'result') if rv is not None: # cache the result job.legacy_result = job.serializer.loads(rv) # type: ignore[attr-defined] try: exc_info = job._exc_info except AttributeError: exc_info = None dependencies = [] # if job._dependency_ids: # Fetch dependencies if they exist # dependencies = Job.fetch_many( # job._dependency_ids, connection=queue.connection, serializer=queue.serializer # ) for dependency_id in job._dependency_ids: try: dependency = Job.fetch(dependency_id, connection=queue.connection, serializer=queue.serializer) except NoSuchJobError: dependency = None dependencies.append((dependency_id, dependency)) context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'job': job, 'queue': queue, 'data_is_valid': data_is_valid, 'exc_info': exc_info, 'dependencies': dependencies, } return render(request, 'django_rq/job_detail.html', context_data) @never_cache @staff_member_required def delete_job(request, queue_index, job_id): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) job = Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer) if request.method == 'POST': # Remove job id from queue and delete the actual job queue.connection.lrem(queue.key, 0, job.id) job.delete() messages.info(request, 'You have successfully deleted %s' % job.id) return redirect('rq_jobs', queue_index) context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'job': job, 'queue': queue, } return render(request, 'django_rq/delete_job.html', context_data) @never_cache @staff_member_required def requeue_job_view(request, queue_index, job_id): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) job = Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer) if request.method == 'POST': requeue_job(job_id, connection=queue.connection, serializer=queue.serializer) messages.info(request, 'You have successfully requeued %s' % job.id) return redirect('rq_job_detail', queue_index, job_id) context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'job': job, 'queue': queue, } return render(request, 'django_rq/delete_job.html', context_data) @never_cache @staff_member_required def clear_queue(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) if request.method == 'POST': try: queue.empty() messages.info(request, 'You have successfully cleared the queue %s' % queue.name) except ResponseError as e: try: suppress = 'EVALSHA' in e.message # type: ignore[attr-defined] except AttributeError: suppress = 'EVALSHA' in str(e) if suppress: messages.error( request, 'This action is not supported on Redis versions < 2.6.0, please use the bulk delete command instead', ) 
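# NOTE: queue.empty() removes jobs via a Lua script (EVALSHA), which very old
# Redis servers (< 2.6.0) reject with a ResponseError; the branch above turns
# that into a friendly message. A rough manual fallback (illustrative sketch
# only, not part of django-rq) would pop and delete job ids directly:
#
#     while True:
#         job_id = queue.connection.lpop(queue.key)
#         if job_id is None:
#             break
#         job = queue.fetch_job(job_id.decode())
#         if job is not None:
#             job.delete()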
else: raise e return redirect('rq_jobs', queue_index) context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'queue': queue, } return render(request, 'django_rq/clear_queue.html', context_data) @never_cache @staff_member_required def requeue_all(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) registry = FailedJobRegistry(queue=queue) if request.method == 'POST': job_ids = registry.get_job_ids() count = 0 # Confirmation received for job_id in job_ids: try: requeue_job(job_id, connection=queue.connection, serializer=queue.serializer) count += 1 except NoSuchJobError: pass messages.info(request, 'You have successfully requeued %d jobs!' % count) return redirect('rq_jobs', queue_index) context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'queue': queue, 'total_jobs': len(registry), } return render(request, 'django_rq/requeue_all.html', context_data) @never_cache @staff_member_required def delete_failed_jobs(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) registry = FailedJobRegistry(queue=queue) if request.method == 'POST': job_ids = registry.get_job_ids() jobs = Job.fetch_many(job_ids, connection=queue.connection) count = 0 for job in jobs: if job: job.delete() count += 1 messages.info(request, 'You have successfully deleted %d jobs!' % count) return redirect('rq_home') context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'queue': queue, 'total_jobs': len(registry), } return render(request, 'django_rq/clear_failed_queue.html', context_data) @never_cache @staff_member_required def confirm_action(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) next_url = request.META.get('HTTP_REFERER') or reverse('rq_jobs', args=[queue_index]) if request.method == 'POST' and request.POST.get('action', False): # confirm action if request.POST.get('_selected_action', False): context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'action': request.POST['action'], 'job_ids': request.POST.getlist('_selected_action'), 'queue': queue, 'next_url': next_url, } return render(request, 'django_rq/confirm_action.html', context_data) return redirect(next_url) @never_cache @staff_member_required def actions(request, queue_index): queue_index = int(queue_index) queue = get_queue_by_index(queue_index) next_url = request.POST.get('next_url') or reverse('rq_jobs', args=[queue_index]) if request.method == 'POST' and request.POST.get('action', False): # do confirmed action if request.POST.get('job_ids', False): job_ids = request.POST.getlist('job_ids') if request.POST['action'] == 'delete': for job_id in job_ids: job = Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer) # Remove job id from queue and delete the actual job queue.connection.lrem(queue.key, 0, job.id) job.delete() messages.info(request, 'You have successfully deleted %s jobs!' % len(job_ids)) elif request.POST['action'] == 'requeue': for job_id in job_ids: requeue_job(job_id, connection=queue.connection, serializer=queue.serializer) messages.info(request, 'You have successfully requeued %d jobs!' % len(job_ids)) elif request.POST['action'] == 'stop': stopped, failed_to_stop = stop_jobs(queue, job_ids) if len(stopped) > 0: messages.info(request, 'You have successfully stopped %d jobs!' % len(stopped)) if len(failed_to_stop) > 0: messages.error(request, '%d jobs failed to stop!' 
% len(failed_to_stop)) return redirect(next_url) @never_cache @staff_member_required def enqueue_job(request, queue_index, job_id): """Enqueue deferred jobs""" queue_index = int(queue_index) queue = get_queue_by_index(queue_index) job = Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer) if request.method == 'POST': try: # _enqueue_job is new in RQ 1.14, this is used to enqueue # job regardless of its dependencies queue._enqueue_job(job) except AttributeError: queue.enqueue_job(job) # Remove job from correct registry if needed registry: Any if job.get_status() == JobStatus.DEFERRED: registry = DeferredJobRegistry(queue.name, queue.connection) registry.remove(job) elif job.get_status() == JobStatus.FINISHED: registry = FinishedJobRegistry(queue.name, queue.connection) registry.remove(job) elif job.get_status() == JobStatus.SCHEDULED: registry = ScheduledJobRegistry(queue.name, queue.connection) registry.remove(job) messages.info(request, 'You have successfully enqueued %s' % job.id) return redirect('rq_job_detail', queue_index, job_id) context_data = { **admin.site.each_context(request), 'queue_index': queue_index, 'job': job, 'queue': queue, } return render(request, 'django_rq/delete_job.html', context_data) @never_cache @staff_member_required @require_POST def stop_job(request, queue_index, job_id): """Stop started job""" queue_index = int(queue_index) queue = get_queue_by_index(queue_index) stopped, _ = stop_jobs(queue, job_id) if len(stopped) == 1: messages.info(request, 'You have successfully stopped %s' % job_id) return redirect('rq_job_detail', queue_index, job_id) else: messages.error(request, 'Failed to stop %s' % job_id) return redirect('rq_job_detail', queue_index, job_id) @never_cache @staff_member_required def scheduler_jobs(request, scheduler_index): scheduler = get_scheduler_by_index(scheduler_index) items_per_page = 100 num_jobs = scheduler.count() page = int(request.GET.get('page', 1)) jobs = [] if num_jobs > 0: last_page = int(ceil(num_jobs / items_per_page)) page_range = list(range(1, last_page + 1)) offset = items_per_page * (page - 1) jobs_times = scheduler.get_jobs(with_times=True, offset=offset, length=items_per_page) for job, time in jobs_times: job.next_run = time job.queue_index = QUEUES_MAP.get(job.origin, 0) if 'cron_string' in job.meta: job.schedule = f"cron: '{job.meta['cron_string']}'" elif 'interval' in job.meta: job.schedule = f"interval: {job.meta['interval']}" if 'repeat' in job.meta: job.schedule += f" repeat: {job.meta['repeat']}" else: job.schedule = 'unknown' jobs.append(job) else: page_range = [] context_data = { **admin.site.each_context(request), 'scheduler': scheduler, 'jobs': jobs, 'num_jobs': num_jobs, 'page': page, 'page_range': page_range, } return render(request, 'django_rq/scheduler.html', context_data) django-rq-3.1/django_rq/contrib/0000775000175000017500000000000015043301563016525 5ustar carstencarstendjango-rq-3.1/django_rq/contrib/prometheus.py0000664000175000017500000000633415043301563021300 0ustar carstencarstenfrom rq.job import JobStatus from ..queues import filter_connection_params, get_connection, get_queue, get_unique_connection_configs from ..workers import get_worker_class try: from prometheus_client import Summary from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily class RQCollector: """RQ stats collector""" summary = Summary('rq_request_processing_seconds_total', 'Time spent collecting RQ data') def collect(self): from ..settings import QUEUES with self.summary.time(): rq_workers = 
GaugeMetricFamily('rq_workers', 'RQ workers', labels=['name', 'state', 'queues'])
                rq_job_successful_total = CounterMetricFamily('rq_job_successful_total', 'RQ successful job count', labels=['name', 'queues'])
                rq_job_failed_total = CounterMetricFamily('rq_job_failed_total', 'RQ failed job count', labels=['name', 'queues'])
                rq_working_seconds_total = CounterMetricFamily('rq_working_seconds_total', 'RQ total working time', labels=['name', 'queues'])
                rq_jobs = GaugeMetricFamily('rq_jobs', 'RQ jobs by status', labels=['queue', 'status'])

                worker_class = get_worker_class()
                unique_configs = get_unique_connection_configs()
                connections = {}
                for queue_name, config in QUEUES.items():
                    index = unique_configs.index(filter_connection_params(config))
                    if index not in connections:
                        connections[index] = connection = get_connection(queue_name)

                        for worker in worker_class.all(connection):
                            name = worker.name
                            label_queues = ','.join(worker.queue_names())
                            rq_workers.add_metric([name, worker.get_state(), label_queues], 1)
                            rq_job_successful_total.add_metric([name, label_queues], worker.successful_job_count)
                            rq_job_failed_total.add_metric([name, label_queues], worker.failed_job_count)
                            rq_working_seconds_total.add_metric([name, label_queues], worker.total_working_time)
                    else:
                        connection = connections[index]

                    queue = get_queue(queue_name, connection=connection)
                    rq_jobs.add_metric([queue_name, JobStatus.QUEUED], queue.count)
                    rq_jobs.add_metric([queue_name, JobStatus.STARTED], queue.started_job_registry.count)
                    rq_jobs.add_metric([queue_name, JobStatus.FINISHED], queue.finished_job_registry.count)
                    rq_jobs.add_metric([queue_name, JobStatus.FAILED], queue.failed_job_registry.count)
                    rq_jobs.add_metric([queue_name, JobStatus.DEFERRED], queue.deferred_job_registry.count)
                    rq_jobs.add_metric([queue_name, JobStatus.SCHEDULED], queue.scheduled_job_registry.count)

                yield rq_workers
                yield rq_job_successful_total
                yield rq_job_failed_total
                yield rq_working_seconds_total
                yield rq_jobs

except ImportError:
    RQCollector = None  # type: ignore[assignment, misc]
django-rq-3.1/django_rq/decorators.py0000664000175000017500000000414715043301563017612 0ustar carstencarstenfrom rq.decorators import job as _rq_job
from typing import Any, Callable, Optional, overload, Protocol, TYPE_CHECKING, TypeVar, Union

from .queues import get_queue, get_result_ttl

if TYPE_CHECKING:
    from redis import Redis
    from rq import Queue
    from typing_extensions import ParamSpec

    P = ParamSpec('P')
    R = TypeVar('R', covariant=True)

    class _JobFn(Protocol[P, R]):
        def delay(self, *args: P.args, **kwargs: P.kwargs) -> R: ...
        def enqueue(self, *args: P.args, **kwargs: P.kwargs) -> R: ...
        def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ...


@overload
def job(func_or_queue: 'Callable[P, R]') -> '_JobFn[P, R]': ...
@overload
def job(
    func_or_queue: Union['Queue', str],
    connection: Optional['Redis'] = None,
    *args: Any,
    **kwargs: Any,
) -> Callable[['Callable[P, R]'], '_JobFn[P, R]']: ...
def job(
    func_or_queue: Union['Callable[P, R]', 'Queue', str],
    connection: Optional['Redis'] = None,
    *args: Any,
    **kwargs: Any,
) -> Union['_JobFn[P, R]', Callable[['Callable[P, R]'], '_JobFn[P, R]']]:
    """
    The same as RQ's job decorator, but it automatically works out the
    ``connection`` argument from RQ_QUEUES.

    It also allows the simplified ``@job`` syntax to put a job into the
    default queue.

    If ``result_ttl`` is not passed, it sets the default TTL to the queue's
    ``DEFAULT_RESULT_TTL``.
""" if callable(func_or_queue): func = func_or_queue queue: Union['Queue', str] = 'default' else: func = None queue = func_or_queue queue_name = 'default' if isinstance(queue, str): queue_name = queue try: queue = get_queue(queue) if connection is None: connection = queue.connection except KeyError: pass else: if connection is None: connection = queue.connection kwargs['result_ttl'] = kwargs.get('result_ttl', get_result_ttl(queue_name)) kwargs['connection'] = connection decorator = _rq_job(queue, *args, **kwargs) if func: return decorator(func) return decorator django-rq-3.1/django_rq/stats_views.py0000664000175000017500000000474115043301563020020 0ustar carstencarstenfrom secrets import compare_digest from django.contrib import admin from django.contrib.admin.views.decorators import staff_member_required from django.http import Http404, HttpResponse, JsonResponse from django.shortcuts import render from django.views.decorators.cache import never_cache from .settings import API_TOKEN from .utils import get_scheduler_statistics, get_statistics try: import prometheus_client from .contrib.prometheus import RQCollector except ImportError: prometheus_client = RQCollector = None # type: ignore[assignment, misc] registry = None def is_authorized(request): auth_header = request.headers.get("Authorization", "") token = None if auth_header.startswith("Bearer "): token = auth_header.removeprefix("Bearer ").strip() return request.user.is_staff or (API_TOKEN and token and compare_digest(API_TOKEN, token)) @never_cache def prometheus_metrics(request): if not is_authorized(request): return JsonResponse( {"error": True, "description": "Missing bearer token. Set token in headers and configure RQ_API_TOKEN in settings.py"} ) global registry if not RQCollector: # type: ignore[truthy-function] raise Http404('prometheus_client has not been installed; install using extra "django-rq[prometheus]"') if not registry: registry = prometheus_client.CollectorRegistry(auto_describe=True) registry.register(RQCollector()) encoder, content_type = prometheus_client.exposition.choose_encoder(request.META.get('HTTP_ACCEPT', '')) if 'name[]' in request.GET: registry = registry.restricted_registry(request.GET.getlist('name[]')) return HttpResponse(encoder(registry), headers={'Content-Type': content_type}) @never_cache @staff_member_required def stats(request): context_data = { **admin.site.each_context(request), **get_statistics(run_maintenance_tasks=True), **get_scheduler_statistics(), "view_metrics": RQCollector is not None, } return render(request, 'django_rq/stats.html', context_data) @never_cache def stats_json(request, token=None): if not is_authorized(request): if token and token == API_TOKEN: return JsonResponse(get_statistics()) else: return JsonResponse( {"error": True, "description": "Missing bearer token. Set token in headers and configure RQ_API_TOKEN in settings.py"} ) return JsonResponse(get_statistics()) django-rq-3.1/django_rq/urls.py0000664000175000017500000000505615043301563016432 0ustar carstencarstenfrom django.urls import re_path from . 
import stats_views, views
from .contrib.prometheus import RQCollector

metrics_view = [
    re_path(r'^metrics/?$', stats_views.prometheus_metrics, name='rq_metrics'),
] if RQCollector else []  # type: ignore[truthy-function]

urlpatterns = [
    re_path(r'^$', stats_views.stats, name='rq_home'),
    re_path(r'^stats.json/?$', stats_views.stats_json, name='rq_home_json'),
    re_path(r'^stats.json/(?P<token>[\w]+)?/?$', stats_views.stats_json, name='rq_home_json'),
    *metrics_view,
    re_path(r'^queues/(?P<queue_index>[\d]+)/$', views.jobs, name='rq_jobs'),
    re_path(r'^workers/(?P<queue_index>[\d]+)/$', views.workers, name='rq_workers'),
    re_path(r'^workers/(?P<queue_index>[\d]+)/(?P<key>[-\w\.\:\$]+)/$', views.worker_details, name='rq_worker_details'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/finished/$', views.finished_jobs, name='rq_finished_jobs'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/failed/$', views.failed_jobs, name='rq_failed_jobs'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/failed/clear/$', views.delete_failed_jobs, name='rq_delete_failed_jobs'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/scheduled/$', views.scheduled_jobs, name='rq_scheduled_jobs'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/started/$', views.started_jobs, name='rq_started_jobs'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/deferred/$', views.deferred_jobs, name='rq_deferred_jobs'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/empty/$', views.clear_queue, name='rq_clear'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/requeue-all/$', views.requeue_all, name='rq_requeue_all'),
    re_path(r'^queues/(?P<queue_index>[\d]+)/(?P<job_id>[^/]+)/$', views.job_detail, name='rq_job_detail'),
    re_path(
        r'^queues/(?P<queue_index>[\d]+)/(?P<job_id>[^/]+)/delete/$', views.delete_job, name='rq_delete_job'
    ),
    re_path(r'^queues/confirm-action/(?P<queue_index>[\d]+)/$', views.confirm_action, name='rq_confirm_action'),
    re_path(r'^queues/actions/(?P<queue_index>[\d]+)/$', views.actions, name='rq_actions'),
    re_path(
        r'^queues/(?P<queue_index>[\d]+)/(?P<job_id>[^/]+)/requeue/$',
        views.requeue_job_view,
        name='rq_requeue_job',
    ),
    re_path(
        r'^queues/(?P<queue_index>[\d]+)/(?P<job_id>[^/]+)/enqueue/$', views.enqueue_job, name='rq_enqueue_job'
    ),
    re_path(
        r'^queues/(?P<queue_index>[\d]+)/(?P<job_id>[^/]+)/stop/$', views.stop_job, name='rq_stop_job'
    ),
    re_path(r'^schedulers/(?P<scheduler_index>[\d]+)/$', views.scheduler_jobs, name='rq_scheduler_jobs'),
]
django-rq-3.1/django_rq/utils.py0000664000175000017500000001753015043301563016605 0ustar carstencarstenfrom typing import cast, Optional, List, Tuple, Union

from django.core.exceptions import ImproperlyConfigured
from django.db import connections
from redis.sentinel import SentinelConnectionPool
from rq.command import send_stop_job_command
from rq.executions import Execution
from rq.job import Job
from rq.registry import (
    DeferredJobRegistry,
    FailedJobRegistry,
    FinishedJobRegistry,
    ScheduledJobRegistry,
    StartedJobRegistry,
    clean_registries,
)
from rq.worker import Worker
from rq.worker_registration import clean_worker_registry

from .queues import get_connection, get_queue_by_index, get_scheduler
from .settings import QUEUES_LIST
from .templatetags.django_rq import to_localtime


def get_scheduler_pid(queue):
    '''Checks whether there's a scheduler-lock on a particular queue, and
    returns the PID. It only works with RQ's built-in RQScheduler.
    If rq-scheduler is installed, this returns False, since it is not possible
    to give useful information without creating a performance issue (it would
    require redis.keys()). Otherwise it checks RQ's built-in RQScheduler for a
    scheduler lock on the desired queue.

    Note: the result might have some delay (1-15 minutes), but it helps to
    visualize whether the setup is working correctly.
    '''
    try:
        # First, try to get rq-scheduler
        scheduler = get_scheduler(queue.name)  # should fail if rq_scheduler is not present
        return False  # Not possible to give useful information without creating a performance issue (redis.keys())
    except ImproperlyConfigured:
        from rq.scheduler import RQScheduler

        # When a scheduler acquires a lock it adds an expiring key
        # (e.g. rq:scheduler-lock:<queue_name>)
        # TODO: (RQ >= 1.13) return queue.scheduler_pid
        pid = queue.connection.get(RQScheduler.get_locking_key(queue.name))
        return int(pid.decode()) if pid is not None else None
    except Exception:
        pass  # Return None
    return None


def get_statistics(run_maintenance_tasks=False):
    queues = []
    for index, config in enumerate(QUEUES_LIST):
        queue = get_queue_by_index(index)
        connection = queue.connection
        connection_kwargs = connection.connection_pool.connection_kwargs

        if run_maintenance_tasks:
            clean_registries(queue)
            clean_worker_registry(queue)

        # Raw access to the first item from the left of the Redis list.
        # This may be inaccurate, since a new job can be added to the left
        # with the `at_front` parameter.
        # Ideally RQ should support Queue.oldest_job
        last_job_id = connection.lindex(queue.key, 0)
        last_job = queue.fetch_job(last_job_id.decode('utf-8')) if last_job_id else None
        if last_job:
            oldest_job_timestamp = to_localtime(last_job.enqueued_at).strftime('%Y-%m-%d, %H:%M:%S')
        else:
            oldest_job_timestamp = "-"

        # remove unneeded properties which are not serializable in JSON
        connection_kwargs.pop('connection_pool', None)
        connection_kwargs.pop('parser_class', None)
        connection_kwargs.pop('retry', None)

        queue_data = {
            'name': queue.name,
            'jobs': queue.count,
            'oldest_job_timestamp': oldest_job_timestamp,
            'index': index,
            'connection_kwargs': connection_kwargs,
            'scheduler_pid': get_scheduler_pid(queue),
        }

        connection = get_connection(queue.name)
        queue_data['workers'] = Worker.count(queue=queue)

        finished_job_registry = FinishedJobRegistry(queue.name, connection)
        started_job_registry = StartedJobRegistry(queue.name, connection)
        deferred_job_registry = DeferredJobRegistry(queue.name, connection)
        failed_job_registry = FailedJobRegistry(queue.name, connection)
        scheduled_job_registry = ScheduledJobRegistry(queue.name, connection)
        queue_data['finished_jobs'] = len(finished_job_registry)
        queue_data['started_jobs'] = len(started_job_registry)
        queue_data['deferred_jobs'] = len(deferred_job_registry)
        queue_data['failed_jobs'] = len(failed_job_registry)
        queue_data['scheduled_jobs'] = len(scheduled_job_registry)

        queues.append(queue_data)
    return {'queues': queues}


def get_scheduler_statistics():
    schedulers = {}
    for index, config in enumerate(QUEUES_LIST):
        # There is only one scheduler per Redis connection, so use the
        # connection as the key to handle configurations with multiple Redis
        # connections and scheduled jobs in more than one of them.
        queue = get_queue_by_index(index)
        if isinstance(queue.connection.connection_pool, SentinelConnectionPool):
            first_sentinel = queue.connection.connection_pool.sentinel_manager.sentinels[0]
            connection = first_sentinel.connection_pool.connection_kwargs
        else:
            connection = queue.connection.connection_pool.connection_kwargs
        conn_key = f"{connection.get('host', 'NOHOST')}:{connection.get('port', 6379)}/{connection.get('db', 0)}"
        if conn_key not in schedulers:
            try:
                scheduler = 
get_scheduler(config['name']) schedulers[conn_key] ={ 'count': scheduler.count(), 'index': index, } except ImproperlyConfigured: pass return {'schedulers': schedulers} def get_jobs( queue, job_ids, registry: Optional[ Union[ DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, ScheduledJobRegistry, StartedJobRegistry, ] ] = None, ) -> List[Job]: """Fetch jobs in bulk from Redis. 1. If job data is not present in Redis, discard the result 2. If `registry` argument is supplied, delete empty jobs from registry """ jobs = Job.fetch_many(job_ids, connection=queue.connection, serializer=queue.serializer) valid_jobs = [] for i, job in enumerate(jobs): if job is None: if registry: registry.remove(job_ids[i]) else: valid_jobs.append(job) return valid_jobs def get_executions(queue, composite_keys: List[Tuple[str, str]]) -> List[Execution]: """Fetch executions in bulk from Redis. 1. If execution data is not present in Redis, discard the result """ executions = [] for job_id, id in composite_keys: try: executions.append(Execution.fetch(id=id, job_id=job_id, connection=queue.connection)) except ValueError: pass return executions def stop_jobs(queue, job_ids): job_ids = job_ids if isinstance(job_ids, (list, tuple)) else [job_ids] stopped_job_ids = [] failed_to_stop_job_ids = [] for job_id in job_ids: try: send_stop_job_command(queue.connection, job_id) except Exception: failed_to_stop_job_ids.append(job_id) continue stopped_job_ids.append(job_id) return stopped_job_ids, failed_to_stop_job_ids def reset_db_connections(): for c in connections.all(): c.close() def configure_sentry(sentry_dsn, **options): """ Configure the Sentry client. The **options kwargs are passed straight from the command invocation - options relevant to Sentry configuration are extracted. In addition to the 'debug' and 'ca_certs' options, which can be passed in as command options, we add the RqIntegration and DjangoIntegration to the config. Raises ImportError if the sentry_sdk is not available. 
""" import sentry_sdk sentry_options = { 'debug': options.get('sentry_debug', False), 'ca_certs': options.get('sentry_ca_certs', None), 'integrations': [ sentry_sdk.integrations.redis.RedisIntegration(), sentry_sdk.integrations.rq.RqIntegration(), sentry_sdk.integrations.django.DjangoIntegration() ] } sentry_sdk.init(sentry_dsn, **sentry_options) django-rq-3.1/django_rq/tests/0000775000175000017500000000000015043301563016227 5ustar carstencarstendjango-rq-3.1/django_rq/tests/test_views.py0000664000175000017500000004440315043301563021002 0ustar carstencarstenimport uuid from datetime import datetime, timedelta, timezone from unittest.mock import PropertyMock, patch from django.contrib.auth.models import User from django.test import TestCase, override_settings from django.test.client import Client from django.urls import reverse from rq.job import Job, JobStatus from rq.registry import ( DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, ScheduledJobRegistry, StartedJobRegistry, ) from django_rq import get_queue from django_rq.queues import get_scheduler from django_rq.workers import get_worker from .fixtures import access_self, failing_job from .utils import get_queue_index @override_settings(RQ={'AUTOCOMMIT': True}) class ViewTest(TestCase): def setUp(self): self.user = User.objects.create_user('foo', password='pass') self.user.is_staff = True self.user.is_active = True self.user.save() self.client = Client() self.client.login(username=self.user.username, password='pass') get_queue('django_rq_test').connection.flushall() def test_jobs(self): """Jobs in queue are displayed properly""" queue = get_queue('default') job = queue.enqueue(access_self) queue_index = get_queue_index('default') response = self.client.get(reverse('rq_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], [job]) def test_job_details(self): """Job data is displayed properly""" queue = get_queue('default') job = queue.enqueue(access_self) queue_index = get_queue_index('default') url = reverse('rq_job_detail', args=[queue_index, job.id]) response = self.client.get(url) self.assertEqual(response.context['job'], job) # This page shouldn't fail when job.data is corrupt queue.connection.hset(job.key, 'data', 'unpickleable data') response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertIn('DeserializationError', response.content.decode()) def test_job_details_with_results(self): """Job with results is displayed properly""" queue = get_queue('default') job = queue.enqueue(access_self) queue_index = get_queue_index('default') worker = get_worker('default') worker.work(burst=True) result = job.results()[0] url = reverse('rq_job_detail', args=[queue_index, job.id]) response = self.client.get(url) assert result.id self.assertContains(response, result.id) def test_job_details_on_deleted_dependency(self): """Page doesn't crash even if job.dependency has been deleted""" queue = get_queue('default') queue_index = get_queue_index('default') job = queue.enqueue(access_self) second_job = queue.enqueue(access_self, depends_on=job) job.delete() url = reverse('rq_job_detail', args=[queue_index, second_job.id]) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertIn(second_job._dependency_id, response.content.decode()) def test_requeue_job(self): """ Ensure that a failed job gets requeued when rq_requeue_job is called """ queue = get_queue('default') queue_index = get_queue_index('default') job = queue.enqueue(failing_job) worker = get_worker('default') 
worker.work(burst=True) job.refresh() self.assertTrue(job.is_failed) self.client.post(reverse('rq_requeue_job', args=[queue_index, job.id]), {'requeue': 'Requeue'}) self.assertIn(job, queue.jobs) job.delete() def test_requeue_all(self): """ Ensure that requeuing all failed job work properly """ queue = get_queue('default') queue_index = get_queue_index('default') queue.enqueue(failing_job) queue.enqueue(failing_job) worker = get_worker('default') worker.work(burst=True) response = self.client.get(reverse('rq_requeue_all', args=[queue_index])) self.assertEqual(response.context['total_jobs'], 2) # After requeue_all is called, jobs are enqueued response = self.client.post(reverse('rq_requeue_all', args=[queue_index])) self.assertEqual(len(queue), 2) def test_requeue_all_if_deleted_job(self): """ Ensure that requeuing all failed job work properly """ queue = get_queue('default') queue_index = get_queue_index('default') job = queue.enqueue(failing_job) queue.enqueue(failing_job) worker = get_worker('default') worker.work(burst=True) response = self.client.get(reverse('rq_requeue_all', args=[queue_index])) self.assertEqual(response.context['total_jobs'], 2) job.delete() # After requeue_all is called, jobs are enqueued response = self.client.post(reverse('rq_requeue_all', args=[queue_index])) self.assertEqual(len(queue), 1) def test_delete_job(self): """ In addition to deleting job from Redis, the job id also needs to be deleted from Queue. """ queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') job = queue.enqueue(access_self) self.client.post(reverse('rq_delete_job', args=[queue_index, job.id]), {'post': 'yes'}) self.assertFalse(Job.exists(job.id, connection=queue.connection)) self.assertNotIn(job.id, queue.job_ids) def test_action_delete_jobs(self): queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') # enqueue some jobs job_ids = [] for _ in range(0, 3): job = queue.enqueue(access_self) job_ids.append(job.id) # remove those jobs using view self.client.post(reverse('rq_actions', args=[queue_index]), {'action': 'delete', 'job_ids': job_ids}) # check if jobs are removed for job_id in job_ids: self.assertFalse(Job.exists(job_id, connection=queue.connection)) self.assertNotIn(job_id, queue.job_ids) def test_enqueue_jobs(self): queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') # enqueue some jobs that depends on other previous_job = None for _ in range(0, 3): job = queue.enqueue(access_self, depends_on=previous_job) previous_job = job # This job is deferred last_job = job self.assertEqual(last_job.get_status(), JobStatus.DEFERRED) self.assertIsNone(last_job.enqueued_at) # We want to force-enqueue this job response = self.client.post(reverse('rq_enqueue_job', args=[queue_index, last_job.id])) # Check that job is updated correctly last_job = queue.fetch_job(last_job.id) assert last_job self.assertEqual(last_job.get_status(), JobStatus.QUEUED) self.assertIsNotNone(last_job.enqueued_at) def test_action_requeue_jobs(self): queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') # enqueue some jobs that will fail jobs = [] job_ids = [] for _ in range(0, 3): job = queue.enqueue(failing_job) jobs.append(job) job_ids.append(job.id) # do those jobs = fail them worker = get_worker('django_rq_test') worker.work(burst=True) # check if all jobs are really failed for job in jobs: self.assertTrue(job.is_failed) # renqueue failed jobs from failed queue self.client.post(reverse('rq_actions', 
args=[queue_index]), {'action': 'requeue', 'job_ids': job_ids}) # check if we requeue all failed jobs for job in jobs: self.assertFalse(job.is_failed) def test_clear_queue(self): """Test that the queue clear actually clears the queue.""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') job = queue.enqueue(access_self) self.client.post(reverse('rq_clear', args=[queue_index]), {'post': 'yes'}) self.assertFalse(Job.exists(job.id, connection=queue.connection)) self.assertNotIn(job.id, queue.job_ids) def test_finished_jobs(self): """Ensure that finished jobs page works properly.""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') job = queue.enqueue(access_self) registry = FinishedJobRegistry(queue.name, queue.connection) registry.add(job, 2) response = self.client.get(reverse('rq_finished_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], [job]) def test_failed_jobs(self): """Ensure that failed jobs page works properly.""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') # Test that page doesn't fail when FailedJobRegistry is empty response = self.client.get(reverse('rq_failed_jobs', args=[queue_index])) self.assertEqual(response.status_code, 200) job = queue.enqueue(access_self) registry = FailedJobRegistry(queue.name, queue.connection) registry.add(job, 2) response = self.client.get(reverse('rq_failed_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], [job]) def test_scheduled_jobs(self): """Ensure that scheduled jobs page works properly.""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') # Test that page doesn't fail when ScheduledJobRegistry is empty response = self.client.get(reverse('rq_scheduled_jobs', args=[queue_index])) self.assertEqual(response.status_code, 200) job = queue.enqueue_at(datetime.now(), access_self) response = self.client.get(reverse('rq_scheduled_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], [job]) # Test that page doesn't crash when job_id has special characters (exclude :) queue.enqueue_at(datetime.now(), access_self, job_id="job-!@#$%^&*()_=+[]{};',.<>?|`~") response = self.client.get(reverse('rq_scheduled_jobs', args=[queue_index])) self.assertEqual(response.status_code, 200) def test_scheduled_jobs_registry_removal(self): """Ensure that non existing job is being deleted from registry by view""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') registry = ScheduledJobRegistry(queue.name, queue.connection) job = queue.enqueue_at(datetime.now(), access_self) self.assertEqual(len(registry), 1) queue.connection.delete(job.key) response = self.client.get(reverse('rq_scheduled_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], []) self.assertEqual(len(registry), 0) def test_started_jobs(self): """Ensure that active jobs page works properly.""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') worker = get_worker('django_rq_test') job = queue.enqueue(access_self) worker.prepare_execution(job) response = self.client.get(reverse('rq_started_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], [job]) def test_deferred_jobs(self): """Ensure that active jobs page works properly.""" queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') job = queue.enqueue(access_self) registry = DeferredJobRegistry(queue.name, queue.connection) 
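# NOTE: RQ registries are backed by Redis sorted sets keyed by job id; the
# second argument to add() below is a ttl in seconds (the sorted-set score is
# the expiry timestamp), so this entry is considered expired after ~2 seconds.
# A short inspection sketch (illustrative only):
#
#     registry = DeferredJobRegistry(queue.name, queue.connection)
#     registry.add(job, 2)             # keep the job in the registry ~2s
#     assert job.id in registry.get_job_ids()
#     assert len(registry) == 1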
registry.add(job, 2) response = self.client.get(reverse('rq_deferred_jobs', args=[queue_index])) self.assertEqual(response.context['jobs'], [job]) def test_workers(self): """Worker index page should show workers for a specific queue""" queue_index = get_queue_index('django_rq_test') worker1 = get_worker('django_rq_test', name=uuid.uuid4().hex) worker1.register_birth() worker2 = get_worker('test3') worker2.register_birth() response = self.client.get(reverse('rq_workers', args=[queue_index])) self.assertEqual(response.context['workers'], [worker1]) def test_worker_details(self): """Worker index page should show workers for a specific queue""" queue_index = get_queue_index('django_rq_test') worker = get_worker('django_rq_test', name=uuid.uuid4().hex) worker.register_birth() response = self.client.get(reverse('rq_worker_details', args=[queue_index, worker.key])) self.assertEqual(response.context['worker'], worker) def test_statistics_json_view(self): """ Django-RQ's statistic as JSON only viewable by staff or with API_TOKEN """ # Override testing RQ_QUEUES queues = [ { 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': 'default', } ] with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): response = self.client.get(reverse('rq_home')) self.assertEqual(response.status_code, 200) response = self.client.get(reverse('rq_home_json')) self.assertEqual(response.status_code, 200) # Not staff, only token self.user.is_staff = False self.user.save() response = self.client.get(reverse('rq_home')) self.assertEqual(response.status_code, 302) # Error, but with 200 code response = self.client.get(reverse('rq_home_json')) self.assertEqual(response.status_code, 200) self.assertIn("error", response.content.decode('utf-8')) # With token, token = '12345abcde' with patch('django_rq.stats_views.API_TOKEN', new_callable=PropertyMock(return_value=token)): response = self.client.get(reverse('rq_home_json', args=[token])) self.assertEqual(response.status_code, 200) self.assertIn("name", response.content.decode('utf-8')) self.assertNotIn('"error": true', response.content.decode('utf-8')) # Wrong token response = self.client.get(reverse('rq_home_json', args=["wrong_token"])) self.assertEqual(response.status_code, 200) self.assertNotIn("name", response.content.decode('utf-8')) self.assertIn('"error": true', response.content.decode('utf-8')) def test_action_stop_jobs(self): queue = get_queue('django_rq_test') queue_index = get_queue_index('django_rq_test') # Enqueue some jobs job_ids, jobs = [], [] worker = get_worker('django_rq_test') # Due to implementation details in RQ v2.x, this test only works # with a single job. 
This test should be changed to use mocks for _ in range(1): job = queue.enqueue(access_self) job_ids.append(job.id) jobs.append(job) worker.prepare_job_execution(job) worker.prepare_execution(job) # Check if the jobs are started for job_id in job_ids: job = Job.fetch(job_id, connection=queue.connection) self.assertEqual(job.get_status(), JobStatus.STARTED) # Stop those jobs using the view started_job_registry = StartedJobRegistry(queue.name, connection=queue.connection) self.assertEqual(len(started_job_registry), len(job_ids)) self.client.post(reverse('rq_actions', args=[queue_index]), {'action': 'stop', 'job_ids': job_ids}) for job in jobs: worker.monitor_work_horse(job, queue) # Sets the job as Failed and removes from Started self.assertEqual(len(started_job_registry), 0) canceled_job_registry = FailedJobRegistry(queue.name, connection=queue.connection) self.assertEqual(len(canceled_job_registry), len(job_ids)) for job_id in job_ids: self.assertTrue(job_id in canceled_job_registry) # def test_scheduler_jobs(self): # # Override testing RQ_QUEUES # queues = [ # { # "connection_config": { # "DB": 0, # "HOST": "localhost", # "PORT": 6379, # }, # "name": "default", # } # ] # with patch( # "django_rq.utils.QUEUES_LIST", # new_callable=PropertyMock(return_value=queues), # ): # scheduler = get_scheduler("default") # scheduler_index = get_queue_index("default") # # Enqueue some jobs # cron_job = scheduler.cron("10 9 * * *", func=access_self, id="cron-job") # forever_job = scheduler.schedule( # scheduled_time=datetime.now() + timedelta(minutes=10), # interval=600, # func=access_self, # id="forever-repeat", # ) # repeat_job = scheduler.schedule( # scheduled_time=datetime.now() + timedelta(minutes=30), # repeat=30, # func=access_self, # interval=600, # id="thirty-repeat", # ) # response = self.client.get( # reverse("rq_scheduler_jobs", args=[scheduler_index]) # ) # self.assertEqual(response.context["num_jobs"], 3) # context_jobs = {job.id: job for job in response.context["jobs"]} # self.assertEqual(context_jobs["cron-job"].schedule, "cron: '10 9 * * *'") # self.assertEqual(context_jobs["forever-repeat"].schedule, "interval: 600") # self.assertEqual( # context_jobs["thirty-repeat"].schedule, "interval: 600 repeat: 30" # ) # index_response = self.client.get(reverse("rq_home")) # self.assertEqual( # index_response.context["schedulers"], # {"localhost:6379/1": {"count": 3, "index": 0}}, # ) django-rq-3.1/django_rq/tests/__init__.py0000664000175000017500000000000015043301563020326 0ustar carstencarstendjango-rq-3.1/django_rq/tests/settings.py0000664000175000017500000001346115043301563020446 0ustar carstencarsten# -*- coding: utf-8 -*- import os REDIS_HOST = os.environ.get("REDIS_HOST", 'localhost') SECRET_KEY = 'a' # Detect whether either django-redis or django-redis-cache is installed. This # is only really used to conditionally configure options for the unit tests. # In actually usage, no such check is necessary. 
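# NOTE: this detection (the try/except below) only decides which optional
# cache backend the test settings exercise. In a real project you would
# instead point an RQ_QUEUES entry at an existing Django cache via
# USE_REDIS_CACHE, as the 'django-redis' queue further down in this file
# does. A minimal sketch (assuming django-redis is installed):
#
#     CACHES = {'default': {'BACKEND': 'django_redis.cache.RedisCache',
#                           'LOCATION': 'redis://localhost:6379/0'}}
#     RQ_QUEUES = {'default': {'USE_REDIS_CACHE': 'default'}}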
try: from django_redis import get_redis_connection REDIS_CACHE_TYPE = 'django-redis' except ImportError: try: import redis_cache REDIS_CACHE_TYPE = 'django-redis-cache' except ImportError: REDIS_CACHE_TYPE = 'none' INSTALLED_APPS = [ 'django.contrib.contenttypes', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.messages', 'django.contrib.sessions', 'django_rq', ] DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } if REDIS_CACHE_TYPE == 'django-redis': CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': "redis://127.0.0.1:6379/0", 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', }, }, } elif REDIS_CACHE_TYPE == 'django-redis-cache': CACHES = { 'default': { 'BACKEND': 'redis_cache.cache.RedisCache', 'LOCATION': '%s:6379' % REDIS_HOST, 'KEY_PREFIX': 'django-rq-tests', 'OPTIONS': { 'DB': 2, 'MAX_ENTRIES': 5000, }, }, } LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "rq_console": { "format": "%(asctime)s %(message)s", "datefmt": "%H:%M:%S", }, }, "handlers": { "rq_console": { "level": "DEBUG", # "class": "logging.StreamHandler", "class": "rq.logutils.ColorizingStreamHandler", "formatter": "rq_console", "exclude": ["%(asctime)s"], }, 'null': { 'level': 'DEBUG', 'class': 'logging.NullHandler', }, }, 'loggers': { "rq.worker": {"handlers": ['null'], "level": "ERROR"}, }, } RQ_QUEUES = { 'default': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'DEFAULT_TIMEOUT': 500, 'DEFAULT_RESULT_TTL': 500, }, 'test': { 'HOST': REDIS_HOST, 'PORT': 1, 'DB': 1, }, 'sentinel': { 'SENTINELS': [(REDIS_HOST, 26736), (REDIS_HOST, 26737)], 'MASTER_NAME': 'testmaster', 'DB': 1, 'USERNAME': 'redis-user', 'PASSWORD': 'secret', 'SOCKET_TIMEOUT': 10, 'SENTINEL_KWARGS': {}, }, 'test1': { 'HOST': REDIS_HOST, 'PORT': 1, 'DB': 1, 'DEFAULT_TIMEOUT': 400, 'QUEUE_CLASS': 'django_rq.tests.fixtures.DummyQueue', }, 'test2': { 'HOST': REDIS_HOST, 'PORT': 1, 'DB': 1, }, 'test3': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 1, 'DEFAULT_RESULT_TTL': 800, }, 'async': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 1, 'ASYNC': False, }, 'url': { 'URL': 'redis://username:password@host:1234/', 'DB': 4, }, 'url_with_db': { 'URL': 'redis://username:password@host:1234/5', }, 'url_default_db': { 'URL': 'redis://username:password@host:1234', }, 'django_rq_test': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, }, 'scheduler_scheduler_active_test': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'ASYNC': False, }, 'scheduler_scheduler_inactive_test': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'ASYNC': False, }, 'worker_scheduler_active_test': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'ASYNC': False, }, 'worker_scheduler_inactive_test': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'ASYNC': False, }, 'django-redis': { 'USE_REDIS_CACHE': 'default', }, 'django_rq_test2': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, }, 'test_scheduler': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'DEFAULT_TIMEOUT': 400, }, 'test_serializer': { 'HOST': REDIS_HOST, 'PORT': 6379, 'DB': 0, 'SERIALIZER': 'rq.serializers.JSONSerializer', }, } RQ = { 'AUTOCOMMIT': False, } if REDIS_CACHE_TYPE == 'django-redis-cache': RQ_QUEUES['django-redis-cache'] = {'USE_REDIS_CACHE': 'django-redis-cache'} ROOT_URLCONF = 'django_rq.tests.urls' BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, "templates")], 'APP_DIRS': False, 'OPTIONS': { 
'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], 'loaders': [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ], }, }, ] MIDDLEWARE = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ) MIDDLEWARE_CLASSES = MIDDLEWARE AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',) django-rq-3.1/django_rq/tests/views.py0000664000175000017500000000051615043301563017740 0ustar carstencarstenfrom django.shortcuts import render from django_rq import get_queue def say_hello(): return 'Hello' def success(request): queue = get_queue() queue.enqueue(say_hello) return render(request, 'django_rq/test.html', {}) def error(request): queue = get_queue() queue.enqueue(say_hello) raise ValueError django-rq-3.1/django_rq/tests/test_prometheus_metrics.py0000664000175000017500000001242615043301563023566 0ustar carstencarstenimport os from unittest import skipIf from unittest.mock import patch from django.contrib.auth.models import User from django.test import TestCase, override_settings from django.test.client import Client from django.urls import NoReverseMatch, reverse from django_rq import get_queue from django_rq.workers import get_worker from .fixtures import access_self, failing_job try: import prometheus_client except ImportError: prometheus_client = None RQ_QUEUES = { 'default': { 'HOST': os.environ.get('REDIS_HOST', 'localhost'), 'PORT': 6379, 'DB': 0, }, } @skipIf(prometheus_client is None, 'prometheus_client is required') @override_settings(RQ={'AUTOCOMMIT': True}) class PrometheusTest(TestCase): def setUp(self): self.user = User.objects.create_user('foo', password='pass') self.user.is_staff = True self.user.is_active = True self.user.save() self.client = Client() self.client.force_login(self.user) get_queue('default').connection.flushall() def assertMetricsContain(self, lines): response = self.client.get(reverse('rq_metrics')) self.assertEqual(response.status_code, 200) self.assertLessEqual( lines, set(response.content.decode('utf-8').splitlines()) ) @patch('django_rq.settings.QUEUES', RQ_QUEUES) def test_metrics_default(self): self.assertMetricsContain( { '# HELP rq_jobs RQ jobs by status', 'rq_jobs{queue="default",status="queued"} 0.0', 'rq_jobs{queue="default",status="started"} 0.0', 'rq_jobs{queue="default",status="finished"} 0.0', 'rq_jobs{queue="default",status="failed"} 0.0', 'rq_jobs{queue="default",status="deferred"} 0.0', 'rq_jobs{queue="default",status="scheduled"} 0.0', } ) @patch('django_rq.settings.QUEUES', RQ_QUEUES) def test_metrics_with_jobs(self): queue = get_queue('default') queue.enqueue(failing_job) for _ in range(10): queue.enqueue(access_self) worker = get_worker('default', name='test_worker') worker.register_birth() # override worker registration to effectively simulate non burst mode register_death = worker.register_death worker.register_birth = worker.register_death = lambda: None # type: ignore[method-assign] try: self.assertMetricsContain( { # job information '# HELP rq_jobs RQ jobs by status', 'rq_jobs{queue="default",status="queued"} 11.0', 'rq_jobs{queue="default",status="started"} 0.0', 'rq_jobs{queue="default",status="finished"} 0.0', 'rq_jobs{queue="default",status="failed"} 
0.0', 'rq_jobs{queue="default",status="deferred"} 0.0', 'rq_jobs{queue="default",status="scheduled"} 0.0', # worker information '# HELP rq_workers RQ workers', 'rq_workers{name="test_worker",queues="default",state="?"} 1.0', '# HELP rq_job_successful_total RQ successful job count', 'rq_job_successful_total{name="test_worker",queues="default"} 0.0', '# HELP rq_job_failed_total RQ failed job count', 'rq_job_failed_total{name="test_worker",queues="default"} 0.0', '# HELP rq_working_seconds_total RQ total working time', 'rq_working_seconds_total{name="test_worker",queues="default"} 0.0', } ) worker.work(burst=True, max_jobs=4) self.assertMetricsContain( { # job information 'rq_jobs{queue="default",status="queued"} 7.0', 'rq_jobs{queue="default",status="finished"} 3.0', 'rq_jobs{queue="default",status="failed"} 1.0', # worker information 'rq_workers{name="test_worker",queues="default",state="idle"} 1.0', 'rq_job_successful_total{name="test_worker",queues="default"} 3.0', 'rq_job_failed_total{name="test_worker",queues="default"} 1.0', } ) worker.work(burst=True) self.assertMetricsContain( { # job information 'rq_jobs{queue="default",status="queued"} 0.0', 'rq_jobs{queue="default",status="finished"} 10.0', 'rq_jobs{queue="default",status="failed"} 1.0', # worker information 'rq_workers{name="test_worker",queues="default",state="idle"} 1.0', 'rq_job_successful_total{name="test_worker",queues="default"} 10.0', 'rq_job_failed_total{name="test_worker",queues="default"} 1.0', } ) finally: register_death() @skipIf(prometheus_client is not None, 'prometheus_client is installed') class NoPrometheusTest(TestCase): def test_no_metrics_without_prometheus_client(self): with self.assertRaises(NoReverseMatch): reverse('rq_metrics') django-rq-3.1/django_rq/tests/fixtures.py0000664000175000017500000000101215043301563020444 0ustar carstencarstenfrom rq import get_current_job from rq.job import Job from rq.worker import Worker from django_rq.queues import DjangoRQ class DummyJob(Job): pass class DummyQueue(DjangoRQ): """Just Fake class for the following test""" class DummyWorker(Worker): pass try: from rq_scheduler import Scheduler class DummyScheduler(Scheduler): pass except ImportError: pass def access_self(): job = get_current_job() assert job return job.id def failing_job(): raise ValueError django-rq-3.1/django_rq/tests/urls.py0000664000175000017500000000056215043301563017571 0ustar carstencarstenfrom django.contrib import admin from django.urls import path from django_rq.urls import urlpatterns as django_rq_urlpatterns from . 
import views urlpatterns = [ path('admin/', admin.site.urls), path('success/', views.success, name='success'), path('error/', views.error, name='error'), path('django-rq/', (django_rq_urlpatterns, '', 'django_rq')), ] django-rq-3.1/django_rq/tests/utils.py0000664000175000017500000000370115043301563017742 0ustar carstencarstenfrom typing import Any, Dict from unittest.mock import patch from django_rq.queues import get_connection, get_queue_by_index try: from redis.backoff import ExponentialWithJitterBackoff, NoBackoff # type: ignore[attr-defined] from redis.retry import Retry except ImportError: ExponentialWithJitterBackoff = None Retry = None # type: ignore[misc, assignment] def _is_buggy_retry(kwargs: Dict[str, Any]) -> bool: return ( Retry is not None and (retry := kwargs.get('retry')) is not None and isinstance(retry, Retry) and isinstance(retry._backoff, ExponentialWithJitterBackoff) # type: ignore[attr-defined] ) def get_queue_index(name='default'): """ Returns the position of Queue for the named queue in QUEUES_LIST """ connection = get_connection(name) connection_kwargs = connection.connection_pool.connection_kwargs for i in range(0, 100): try: q = get_queue_by_index(i) except AttributeError: continue if q.name == name: # assert that the connection is correct pool_kwargs = q.connection.connection_pool.connection_kwargs if not _is_buggy_retry(pool_kwargs) or not _is_buggy_retry(connection_kwargs): assert pool_kwargs == connection_kwargs else: # patch the retry backoff since there is a bug in the default # backoff strategy # # fixed in https://github.com/redis/redis-py/pull/3668 with patch.object( pool_kwargs['retry'], '_backoff', NoBackoff() ), patch.object( connection_kwargs['retry'], '_backoff', NoBackoff() ): assert pool_kwargs == connection_kwargs assert pool_kwargs['retry']._backoff.__dict__ == connection_kwargs['retry']._backoff.__dict__ return i return None django-rq-3.1/django_rq/tests/tests.py0000664000175000017500000011325615043301563017753 0ustar carstencarstenimport sys import datetime import time from typing import Any, cast, Dict, List from unittest import skipIf, mock from unittest.mock import patch, PropertyMock, MagicMock from uuid import uuid4 from django.conf import settings from django.core.management import call_command from django.test import TestCase, override_settings from django.urls import reverse from django.utils.safestring import SafeString from redis.exceptions import ConnectionError from rq import get_current_job, Queue import rq from rq.exceptions import NoSuchJobError from rq.job import Job from rq.registry import FinishedJobRegistry, ScheduledJobRegistry from rq.suspension import is_suspended from rq.worker import Worker from rq.serializers import DefaultSerializer, JSONSerializer from django_rq.decorators import job from django_rq.jobs import get_job_class from django_rq.management.commands import rqworker from django_rq.queues import ( get_connection, get_queue, get_queues, get_unique_connection_configs, DjangoRQ, get_redis_connection, ) from django_rq import thread_queue from django_rq.templatetags.django_rq import force_escape, to_localtime from django_rq.tests.fixtures import access_self, DummyJob, DummyQueue, DummyWorker from django_rq.utils import get_jobs, get_statistics, get_scheduler_pid from django_rq.workers import get_worker, get_worker_class try: from rq_scheduler import Scheduler from ..queues import get_scheduler from django_rq.tests.fixtures import DummyScheduler RQ_SCHEDULER_INSTALLED = True except ImportError: 
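# NOTE: this optional-import guard is the usual soft-dependency pattern: the
# flag set below lets scheduler-specific tests be skipped when rq-scheduler
# is not installed, e.g. (illustrative sketch using the imports above):
#
#     @skipIf(not RQ_SCHEDULER_INSTALLED, 'rq_scheduler not installed')
#     class SchedulerSmokeTest(TestCase):
#         def test_get_scheduler(self):
#             self.assertIsInstance(get_scheduler('default'), Scheduler)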
RQ_SCHEDULER_INSTALLED = False QUEUES = settings.RQ_QUEUES def divide(a, b): return a / b def long_running_job(timeout=10): time.sleep(timeout) return 'Done sleeping...' def flush_registry(registry): connection = registry.connection for job_id in registry.get_job_ids(): connection.zrem(registry.key, job_id) try: job = Job.fetch(job_id, connection=connection) job.delete() except NoSuchJobError: pass class RqStatsTest(TestCase): def test_get_connection_default(self): """ Test that rqstats returns the right statistics """ # Override testing RQ_QUEUES queues = [ { 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': 'default', } ] with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): # Only to make sure it doesn't crash call_command('rqstats') call_command('rqstats', '-j') call_command('rqstats', '-y') @override_settings(RQ={'AUTOCOMMIT': True}) class QueuesTest(TestCase): def setUp(self): """Used to test with / without sentry_sdk available.""" self.mock_sdk = mock.MagicMock() self.mock_sdk.Hub.current.client.options = {} sys.modules["sentry_sdk"] = self.mock_sdk def tearDown(self): del sys.modules["sentry_sdk"] def test_get_connection_default(self): """ Test that get_connection returns the right connection based for `default` queue. """ config = QUEUES['default'] connection = get_connection() connection_kwargs = connection.connection_pool.connection_kwargs self.assertEqual(connection_kwargs['host'], config['HOST']) self.assertEqual(connection_kwargs['port'], config['PORT']) self.assertEqual(connection_kwargs['db'], config['DB']) def test_get_connection_test(self): """ Test that get_connection returns the right connection based for `test` queue. """ config = QUEUES['test'] connection = get_connection('test') connection_kwargs = connection.connection_pool.connection_kwargs self.assertEqual(connection_kwargs['host'], config['HOST']) self.assertEqual(connection_kwargs['port'], config['PORT']) self.assertEqual(connection_kwargs['db'], config['DB']) @patch('django_rq.queues.Sentinel') def test_get_connection_sentinel(self, sentinel_class_mock): """ Test that get_connection returns the right connection based for `sentinel` queue. """ sentinel_mock = MagicMock() sentinel_mock.master_for.return_value = sentinel_mock sentinel_class_mock.side_effect = [sentinel_mock] config = QUEUES['sentinel'] connection = get_connection('sentinel') self.assertEqual(connection, sentinel_mock) self.assertEqual(sentinel_mock.master_for.call_count, 1) self.assertEqual(sentinel_class_mock.call_count, 1) sentinel_instances = sentinel_class_mock.call_args[0][0] self.assertListEqual(config['SENTINELS'], sentinel_instances) connection_kwargs = sentinel_mock.master_for.call_args[1] self.assertEqual(connection_kwargs['service_name'], config['MASTER_NAME']) @patch('django_rq.queues.Sentinel') def test_sentinel_class_initialized_with_kw_args(self, sentinel_class_mock): """ Test that Sentinel object is initialized with proper connection kwargs. 
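# A minimal sketch of the plain Redis entries these connection tests assume
# in settings.RQ_QUEUES. Host, port and DB values are illustrative
# placeholders; DEFAULT_TIMEOUT matches the 500 seconds the default-timeout
# test further below expects.
RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
        'DEFAULT_TIMEOUT': 500,
    },
    'test': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 1,
    },
}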
""" config = { 'SENTINELS': [], 'MASTER_NAME': 'test_master', 'SOCKET_TIMEOUT': 0.2, 'DB': 0, 'USERNAME': 'redis-user', 'PASSWORD': 'redis-pass', 'CONNECTION_KWARGS': {'ssl': False}, 'SENTINEL_KWARGS': {'username': 'sentinel-user', 'password': 'sentinel-pass', 'socket_timeout': 0.3}, } get_redis_connection(config) sentinel_init_sentinel_kwargs = sentinel_class_mock.call_args[1] self.assertDictEqual( sentinel_init_sentinel_kwargs, {'db': 0, 'username': 'redis-user', 'password': 'redis-pass', 'socket_timeout': 0.2, 'ssl': False, 'sentinel_kwargs': {'username': 'sentinel-user', 'password': 'sentinel-pass', 'socket_timeout': 0.3}} ) def test_get_queue_default(self): """ Test that get_queue use the right parameters for `default` connection. """ config = QUEUES['default'] queue = get_queue('default') connection_kwargs = queue.connection.connection_pool.connection_kwargs self.assertEqual(queue.name, 'default') self.assertEqual(connection_kwargs['host'], config['HOST']) self.assertEqual(connection_kwargs['port'], config['PORT']) self.assertEqual(connection_kwargs['db'], config['DB']) def test_get_queue_url(self): """ Test that get_queue use the right parameters for queues using URL for connection. """ config = QUEUES['url'] queue = get_queue('url') connection_kwargs = queue.connection.connection_pool.connection_kwargs self.assertEqual(queue.name, 'url') self.assertEqual(connection_kwargs['host'], 'host') self.assertEqual(connection_kwargs['port'], 1234) self.assertEqual(connection_kwargs['db'], 4) self.assertEqual(connection_kwargs['password'], 'password') def test_get_queue_url_with_db(self): """ Test that get_queue use the right parameters for queues using URL for connection, where URL contains the db number (either as querystring or path segment). """ config = QUEUES['url_with_db'] queue = get_queue('url_with_db') connection_kwargs = queue.connection.connection_pool.connection_kwargs self.assertEqual(queue.name, 'url_with_db') self.assertEqual(connection_kwargs['host'], 'host') self.assertEqual(connection_kwargs['port'], 1234) self.assertEqual(connection_kwargs['db'], 5) self.assertEqual(connection_kwargs['password'], 'password') def test_get_queue_url_with_db_default(self): """ Test that get_queue use the right parameters for queues using URL for connection, where no DB given and URL does not contain the db number (redis-py defaults to 0, should not break). """ queue = get_queue('url_default_db') connection_kwargs = queue.connection.connection_pool.connection_kwargs self.assertEqual(queue.name, 'url_default_db') self.assertEqual(connection_kwargs['host'], 'host') self.assertEqual(connection_kwargs['port'], 1234) self.assertEqual(connection_kwargs['db'], None) self.assertEqual(connection_kwargs['password'], 'password') def test_get_queue_test(self): """ Test that get_queue use the right parameters for `test` connection. """ config = QUEUES['test'] queue = get_queue('test') connection_kwargs = queue.connection.connection_pool.connection_kwargs self.assertEqual(queue.name, 'test') self.assertEqual(connection_kwargs['host'], config['HOST']) self.assertEqual(connection_kwargs['port'], config['PORT']) self.assertEqual(connection_kwargs['db'], config['DB']) def test_get_queues_same_connection(self): """ Checks that getting queues with the same redis connection is ok. """ self.assertEqual(get_queues('test', 'test2'), [get_queue('test'), get_queue('test2')]) def test_get_queues_different_connections(self): """ Checks that getting queues with different redis connections raise an exception. 
""" self.assertRaises(ValueError, get_queues, 'default', 'test') def test_get_queues_different_classes(self): """ Checks that getting queues with different classes (defined in configuration) raises an exception. """ self.assertRaises(ValueError, get_queues, 'test', 'test1') def test_pass_queue_via_commandline_args(self): """ Checks that passing queues via commandline arguments works """ queue_names = ['django_rq_test', 'django_rq_test2'] jobs: List[Any] = [] for queue_name in queue_names: queue = get_queue(queue_name) jobs.append( { 'job': queue.enqueue(divide, 42, 1), 'finished_job_registry': FinishedJobRegistry(queue.name, queue.connection), } ) call_command('rqworker', *queue_names, burst=True) for job in jobs: self.assertTrue(job['job'].is_finished) self.assertIn(job['job'].id, job['finished_job_registry'].get_job_ids()) # Test with rqworker-pool command jobs: List[Any] = [] for queue_name in queue_names: queue = get_queue(queue_name) jobs.append( { 'job': queue.enqueue(divide, 42, 1), 'finished_job_registry': FinishedJobRegistry(queue.name, queue.connection), } ) call_command('rqworker-pool', *queue_names, burst=True) for job in jobs: self.assertTrue(job['job'].is_finished) self.assertIn(job['job'].id, job['finished_job_registry'].get_job_ids()) def test_configure_sentry(self): rqworker.configure_sentry('https://1@sentry.io/1') self.mock_sdk.init.assert_called_once_with( 'https://1@sentry.io/1', ca_certs=None, debug=False, integrations=[ self.mock_sdk.integrations.redis.RedisIntegration(), self.mock_sdk.integrations.rq.RqIntegration(), self.mock_sdk.integrations.django.DjangoIntegration(), ], ) def test_configure_sentry__options(self): """Check that debug and ca_certs can be passed through to Sentry.""" rqworker.configure_sentry('https://1@sentry.io/1', sentry_debug=True, sentry_ca_certs='/certs') self.mock_sdk.init.assert_called_once_with( 'https://1@sentry.io/1', ca_certs='/certs', debug=True, integrations=[ self.mock_sdk.integrations.redis.RedisIntegration(), self.mock_sdk.integrations.rq.RqIntegration(), self.mock_sdk.integrations.django.DjangoIntegration(), ], ) def test_sentry_dsn(self): """Check that options are passed to configure_sentry as expected.""" queue_names = ['django_rq_test'] call_command( 'rqworker', *queue_names, burst=True, sentry_dsn='https://1@sentry.io/1', sentry_debug=True, sentry_ca_certs='/certs' ) self.mock_sdk.init.assert_called_once_with( 'https://1@sentry.io/1', ca_certs='/certs', debug=True, integrations=[ self.mock_sdk.integrations.redis.RedisIntegration(), self.mock_sdk.integrations.rq.RqIntegration(), self.mock_sdk.integrations.django.DjangoIntegration(), ], ) @mock.patch('django_rq.management.commands.rqworker.configure_sentry') def test_sentry_dsn__noop(self, mocked): """Check that sentry is ignored if sentry_dsn is not passed in.""" queue_names = ['django_rq_test'] call_command('rqworker', *queue_names, burst=True, sentry_debug=True, sentry_ca_certs='/certs') self.assertEqual(mocked.call_count, 0) @mock.patch('django_rq.management.commands.rqworker.configure_sentry') def test_sentry_sdk_import_error(self, mocked): """Check the command handles import errors as expected.""" mocked.side_effect = ImportError queue_names = ['django_rq_test'] with self.assertRaises(SystemExit): call_command('rqworker', *queue_names, burst=True, sentry_dsn='https://1@sentry.io/1') # @mock.patch('django_rq.management.commands.rqworker.Connection') # def test_connection_error(self, mocked): # """Check that redis ConnectionErrors are handled correctly.""" # 
mocked.side_effect = ConnectionError("Unable to connect") # queue_names = ['django_rq_test'] # with self.assertRaises(SystemExit): # call_command('rqworker', *queue_names) def test_get_unique_connection_configs(self): connection_params_1 = { 'HOST': 'localhost', 'PORT': 6379, 'DB': 0, } connection_params_2 = { 'HOST': 'localhost', 'PORT': 6379, 'DB': 1, } config = {'default': connection_params_1, 'test': connection_params_2} unique_configs = get_unique_connection_configs(config) self.assertEqual(len(unique_configs), 2) self.assertIn(connection_params_1, unique_configs) self.assertIn(connection_params_2, unique_configs) # self.assertEqual(get_unique_connection_configs(config), # [connection_params_1, connection_params_2]) config = {'default': connection_params_1, 'test': connection_params_1} # Should return one connection config since it filters out duplicates self.assertEqual(get_unique_connection_configs(config), [connection_params_1]) def test_get_unique_connection_configs_with_different_timeout(self): connection_params_1 = { 'HOST': 'localhost', 'PORT': 6379, 'DB': 0, } connection_params_2 = { 'HOST': 'localhost', 'PORT': 6379, 'DB': 1, } queue_params_a = dict(connection_params_1) queue_params_b = dict(connection_params_2) queue_params_c = dict(connection_params_2) queue_params_c["DEFAULT_TIMEOUT"] = 1 config = { 'default': queue_params_a, 'test_b': queue_params_b, 'test_c': queue_params_c, } unique_configs = get_unique_connection_configs(config) self.assertEqual(len(unique_configs), 2) self.assertIn(connection_params_1, unique_configs) self.assertIn(connection_params_2, unique_configs) def test_async(self): """ Checks whether asynchronous settings work """ # Make sure is_async is not set by default default_queue = get_queue('default') self.assertTrue(default_queue._is_async) # Make sure is_async override works default_queue_is_async = get_queue('default', is_async=False) self.assertFalse(default_queue_is_async._is_async) # Make sure old keyword argument 'async' works for backwards # compatibility with code expecting older versions of rq or django-rq. # Note 'async' is a reserved keyword in Python >= 3.7. default_queue_async = get_queue('default', **cast(Dict[str, Any], {'async': False})) self.assertFalse(default_queue_async._is_async) # Make sure is_async setting works async_queue = get_queue('async') self.assertFalse(async_queue._is_async) @override_settings(RQ={'AUTOCOMMIT': False}) def test_autocommit(self): """ Checks whether autocommit is set properly. """ queue = get_queue(autocommit=True) self.assertTrue(queue._autocommit) queue = get_queue(autocommit=False) self.assertFalse(queue._autocommit) # Falls back to default AUTOCOMMIT mode queue = get_queue() self.assertFalse(queue._autocommit) queues = get_queues(autocommit=True) self.assertTrue(queues[0]._autocommit) queues = get_queues(autocommit=False) self.assertFalse(queues[0]._autocommit) queues = get_queues() self.assertFalse(queues[0]._autocommit) def test_default_timeout(self): """Ensure DEFAULT_TIMEOUT are properly parsed.""" queue = get_queue() self.assertEqual(queue._default_timeout, 500) queue = get_queue('test1') self.assertEqual(queue._default_timeout, 400) def test_get_queue_serializer(self): """ Test that the correct serializer is set on the queue. 
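# A minimal sketch of eager execution for tests, which the is_async
# assertions above rely on: with is_async=False the job runs inline on
# enqueue and no worker is needed.
queue = get_queue('default', is_async=False)
job = queue.enqueue(divide, 10, 5)  # executed synchronously
assert job.is_finished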
""" queue = get_queue('test_serializer') self.assertEqual(queue.name, 'test_serializer') self.assertEqual(queue.serializer, rq.serializers.JSONSerializer) @override_settings(RQ={'AUTOCOMMIT': True}) class DecoratorTest(TestCase): def test_job_decorator(self): # Ensure that decorator passes in the right queue from settings.py queue_name = 'test3' config = QUEUES[queue_name] @job(queue_name) def test(): pass result = test.delay() queue = get_queue(queue_name) self.assertEqual(result.origin, queue_name) result.delete() def test_job_decorator_default(self): # Ensure that decorator passes in the right queue from settings.py @job def test(): pass result = test.delay() self.assertEqual(result.origin, 'default') result.delete() @override_settings(RQ={'AUTOCOMMIT': True, 'DEFAULT_RESULT_TTL': 60}) def test_job_decorator_with_result_ttl(self): # Ensure that decorator result_ttl override the queue DEFAULT_RESULT_TTL and # RQ DEFAULT_RESULT_TTL when available queue_name = 'test3' config = QUEUES[queue_name] @job(queue_name, result_ttl=674) def test(): pass result = test.delay() self.assertEqual(result.result_ttl, 674) self.assertNotEqual(config['DEFAULT_RESULT_TTL'], 674) result.delete() @override_settings(RQ={'AUTOCOMMIT': True, 'DEFAULT_RESULT_TTL': 60}) def test_job_decorator_queue_result_ttl(self): # Ensure the queue DEFAULT_RESULT_TTL is used when the result_ttl is not passed queue_name = 'test3' config = QUEUES[queue_name] @job(queue_name) def test(): pass result = test.delay() self.assertEqual(result.result_ttl, config['DEFAULT_RESULT_TTL']) self.assertNotEqual(config['DEFAULT_RESULT_TTL'], 60) result.delete() @override_settings(RQ={'AUTOCOMMIT': True, 'DEFAULT_RESULT_TTL': 60}) def test_job_decorator_queue_without_result_ttl(self): # Ensure the RQ DEFAULT_RESULT_TTL is used when the result_ttl is not passed and # the queue does not have it either queue_name = 'django_rq_test' config = QUEUES[queue_name] @job(queue_name) def test(): pass result = test.delay() self.assertIsNone(config.get('DEFAULT_RESULT_TTL')) self.assertEqual(result.result_ttl, 60) result.delete() def test_job_decorator_default_queue_result_ttl(self): # Ensure the default queue DEFAULT_RESULT_TTL is used when queue name is not passed @job def test(): pass result = test.delay() self.assertEqual(result.result_ttl, QUEUES['default']['DEFAULT_RESULT_TTL']) result.delete() @override_settings(RQ={'AUTOCOMMIT': True}) class WorkersTest(TestCase): def test_get_worker_default(self): """ By default, ``get_worker`` should return worker for ``default`` queue. """ worker = get_worker() queue = worker.queues[0] self.assertEqual(queue.name, 'default') def test_get_worker_specified(self): """ Checks if a worker with specified queues is created when queue names are given. 
""" w = get_worker('test3') self.assertEqual(len(w.queues), 1) queue = w.queues[0] self.assertEqual(queue.name, 'test3') def test_get_worker_custom_classes(self): w = get_worker( job_class='django_rq.tests.fixtures.DummyJob', queue_class='django_rq.tests.fixtures.DummyQueue', worker_class='django_rq.tests.fixtures.DummyWorker', ) self.assertIs(w.job_class, DummyJob) self.assertIsInstance(w.queues[0], DummyQueue) self.assertIsInstance(w, DummyWorker) def test_get_worker_custom_serializer(self): w = get_worker( serializer='rq.serializers.JSONSerializer', ) self.assertEqual(w.serializer, JSONSerializer) def test_get_worker_default_serializer(self): w = get_worker() self.assertEqual(w.serializer, DefaultSerializer) def test_get_current_job(self): """ Ensure that functions using RQ's ``get_current_job`` doesn't fail when run from rqworker (the job id is not in the failed queue). """ queue = get_queue() job = queue.enqueue(access_self) call_command('rqworker', '--burst') failed_queue = Queue(name='failed', connection=queue.connection) self.assertFalse(job.id in failed_queue.job_ids) job.delete() @patch('django_rq.management.commands.rqworker.setup_loghandlers') def test_commandline_verbosity_affects_logging_level(self, setup_loghandlers_mock): expected_level = { 0: 'WARNING', 1: 'INFO', 2: 'DEBUG', 3: 'DEBUG', } for verbosity in [0, 1, 2, 3]: setup_loghandlers_mock.reset_mock() call_command('rqworker', verbosity=verbosity, burst=True) setup_loghandlers_mock.assert_called_once_with(expected_level[verbosity]) class ThreadQueueTest(TestCase): @override_settings(RQ={'AUTOCOMMIT': True}) def test_enqueue_autocommit_on(self): """ Running ``enqueue`` when AUTOCOMMIT is on should immediately persist job into Redis. """ queue = get_queue() job = queue.enqueue(divide, 1, 1) self.assertTrue(job.id in queue.job_ids) job.delete() @override_settings(RQ={'AUTOCOMMIT': False}) def test_enqueue_autocommit_off(self): """ Running ``enqueue`` when AUTOCOMMIT is off should put the job in the delayed queue instead of enqueueing it right away. """ queue = get_queue() job = queue.enqueue(divide, 1, b=1) self.assertTrue(job is None) delayed_queue = thread_queue.get_queue() self.assertEqual(delayed_queue[0][0], queue) self.assertEqual(delayed_queue[0][1], ()) kwargs = delayed_queue[0][2] self.assertEqual(kwargs['args'], (1,)) self.assertEqual(kwargs['result_ttl'], None) self.assertEqual(kwargs['kwargs'], {'b': 1}) self.assertEqual(kwargs['func'], divide) self.assertEqual(kwargs['timeout'], None) def test_commit(self): """ Ensure that commit_delayed_jobs properly enqueue jobs and clears delayed_queue. 
""" queue = get_queue() delayed_queue = thread_queue.get_queue() queue.empty() self.assertEqual(queue.count, 0) queue.enqueue_call(divide, args=(1,), kwargs={'b': 1}) thread_queue.commit() self.assertEqual(queue.count, 1) self.assertEqual(len(delayed_queue), 0) def test_clear(self): queue = get_queue() delayed_queue = thread_queue.get_queue() delayed_queue.append((queue, divide, (1,), {'b': 1})) thread_queue.clear() delayed_queue = thread_queue.get_queue() self.assertEqual(delayed_queue, []) @override_settings(RQ={'AUTOCOMMIT': False}) def test_success(self): queue = get_queue() queue.empty() thread_queue.clear() self.assertEqual(queue.count, 0) self.client.get(reverse('success')) self.assertEqual(queue.count, 1) @override_settings(RQ={'AUTOCOMMIT': False}) def test_error(self): queue = get_queue() queue.empty() self.assertEqual(queue.count, 0) url = reverse('error') self.assertRaises(ValueError, self.client.get, url) self.assertEqual(queue.count, 0) @skipIf(RQ_SCHEDULER_INSTALLED is False, 'RQ Scheduler not installed') class SchedulerTest(TestCase): def test_get_scheduler(self): """ Ensure get_scheduler creates a scheduler instance with the right connection params for `test` queue. """ config = QUEUES['test'] scheduler = get_scheduler('test') connection_kwargs = scheduler.connection.connection_pool.connection_kwargs self.assertEqual(scheduler.queue_name, 'test') self.assertEqual(connection_kwargs['host'], config['HOST']) self.assertEqual(connection_kwargs['port'], config['PORT']) self.assertEqual(connection_kwargs['db'], config['DB']) def test_get_scheduler_custom_connection(self): """ Ensure get_scheduler respects the `connection` argument. """ with get_connection('test') as connection: scheduler = get_scheduler('test', connection=connection) self.assertIs(scheduler.connection, connection) @patch('django_rq.management.commands.rqscheduler.get_scheduler') @patch('django_rq.management.commands.rqscheduler.setup_loghandlers') def test_commandline_verbosity_affects_logging_level(self, setup_loghandlers_mock, get_scheduler_mock): get_scheduler_mock.run.return_value = None expected_level = { 0: 'WARNING', 1: 'INFO', 2: 'DEBUG', 3: 'DEBUG', } for verbosity in [0, 1, 2, 3]: setup_loghandlers_mock.reset_mock() call_command('rqscheduler', verbosity=verbosity) setup_loghandlers_mock.assert_called_once_with(expected_level[verbosity]) @override_settings(RQ={'SCHEDULER_CLASS': 'django_rq.tests.fixtures.DummyScheduler'}) def test_scheduler_default(self): """ Scheduler class customization. """ scheduler = get_scheduler('default') self.assertIsInstance(scheduler, DummyScheduler) @override_settings(RQ={'AUTOCOMMIT': True}) def test_scheduler_default_timeout(self): """ Ensure scheduler respects DEFAULT_RESULT_TTL value for `result_ttl` param. """ scheduler = get_scheduler('test_scheduler') job = scheduler.enqueue_at(datetime.datetime.now() + datetime.timedelta(days=1), divide, 1, 1) self.assertTrue(job in scheduler.get_jobs()) self.assertEqual(job.timeout, 400) job.delete() @override_settings(RQ={'AUTOCOMMIT': True, 'DEFAULT_RESULT_TTL': 5432}) def test_scheduler_default_result_ttl(self): """ Ensure scheduler respects DEFAULT_RESULT_TTL value for `result_ttl` param. 
""" scheduler = get_scheduler('test_scheduler') job = scheduler.enqueue_at(datetime.datetime.now() + datetime.timedelta(days=1), divide, 1, 1) self.assertTrue(job in scheduler.get_jobs()) self.assertEqual(job.result_ttl, 5432) job.delete() class RedisCacheTest(TestCase): @skipIf(settings.REDIS_CACHE_TYPE != 'django-redis', 'django-redis not installed') @patch('django_redis.get_redis_connection') def test_get_queue_django_redis(self, mocked): """ Test that the USE_REDIS_CACHE option for configuration works. """ queue = get_queue('django-redis') queue.enqueue(access_self) self.assertEqual(len(queue), 1) self.assertEqual(mocked.call_count, 1) @skipIf(settings.REDIS_CACHE_TYPE != 'django-redis-cache', 'django-redis-cache not installed') def test_get_queue_django_redis_cache(self): """ Test that the USE_REDIS_CACHE option for configuration works. """ queueName = 'django-redis-cache' queue = get_queue(queueName) connection_kwargs = queue.connection.connection_pool.connection_kwargs self.assertEqual(queue.name, queueName) cacheHost = settings.CACHES[queueName]['LOCATION'].split(':')[0] cachePort = settings.CACHES[queueName]['LOCATION'].split(':')[1] cacheDBNum = settings.CACHES[queueName]['OPTIONS']['DB'] self.assertEqual(connection_kwargs['host'], cacheHost) self.assertEqual(connection_kwargs['port'], int(cachePort)) self.assertEqual(connection_kwargs['db'], int(cacheDBNum)) self.assertEqual(connection_kwargs['password'], None) class JobClassTest(TestCase): def test_default_job_class(self): job_class = get_job_class() self.assertIs(job_class, Job) @override_settings(RQ={'JOB_CLASS': 'django_rq.tests.fixtures.DummyJob'}) def test_custom_class(self): job_class = get_job_class() self.assertIs(job_class, DummyJob) def test_local_override(self): self.assertIs(get_job_class('django_rq.tests.fixtures.DummyJob'), DummyJob) class SuspendResumeTest(TestCase): def test_suspend_and_resume_commands(self): connection = get_connection() self.assertEqual(is_suspended(connection), 0) call_command('rqsuspend') self.assertEqual(is_suspended(connection), 1) call_command('rqresume') self.assertEqual(is_suspended(connection), 0) class QueueClassTest(TestCase): def test_default_queue_class(self): queue = get_queue('test') self.assertIsInstance(queue, DjangoRQ) def test_for_queue(self): queue = get_queue('test1') self.assertIsInstance(queue, DummyQueue) def test_in_kwargs(self): queue = get_queue('test', queue_class=DummyQueue) self.assertIsInstance(queue, DummyQueue) class WorkerClassTest(TestCase): def test_default_worker_class(self): worker = get_worker() self.assertIsInstance(worker, Worker) @override_settings(RQ={'WORKER_CLASS': 'django_rq.tests.fixtures.DummyWorker'}) def test_custom_class(self): worker = get_worker() self.assertIsInstance(worker, DummyWorker) def test_local_override(self): self.assertIs(get_worker_class('django_rq.tests.fixtures.DummyWorker'), DummyWorker) @override_settings(RQ={'AUTOCOMMIT': True}) class TemplateTagTest(TestCase): def test_to_localtime(self): with self.settings(TIME_ZONE='Asia/Jakarta'): queue = get_queue() job = queue.enqueue(access_self) time = to_localtime(job.created_at) self.assertIsNotNone(time.tzinfo) self.assertEqual(time.strftime("%z"), '+0700') def test_force_escape_safe_string(self): html = "
<h1>hello world</h1>
" safe_string = SafeString(html) escaped_string = force_escape(safe_string) expected = "<h1>hello world</h1>" self.assertEqual(escaped_string, expected) def test_force_escape_regular_string(self): html = "hello world" safe_string = SafeString(html) escaped_string = force_escape(safe_string) expected = "hello world" self.assertEqual(escaped_string, expected) class SchedulerPIDTest(TestCase): @skipIf(RQ_SCHEDULER_INSTALLED is False, 'RQ Scheduler not installed') def test_scheduler_scheduler_pid_active(self): test_queue = 'scheduler_scheduler_active_test' queues = [{ 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': test_queue, }] with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): scheduler = get_scheduler(test_queue) scheduler.register_birth() self.assertIs(get_scheduler_pid(get_queue(scheduler.queue_name)), False) scheduler.register_death() @skipIf(RQ_SCHEDULER_INSTALLED is False, 'RQ Scheduler not installed') def test_scheduler_scheduler_pid_inactive(self): test_queue = 'scheduler_scheduler_inactive_test' queues = [{ 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': test_queue, }] with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): connection = get_connection(test_queue) connection.flushall() # flush is needed to isolate from other tests scheduler = get_scheduler(test_queue) scheduler.remove_lock() scheduler.register_death() # will mark the scheduler as death so get_scheduler_pid will return None self.assertIs(get_scheduler_pid(get_queue(scheduler.queue_name)), False) @skipIf(RQ_SCHEDULER_INSTALLED is True, 'RQ Scheduler installed (no worker--with-scheduler)') def test_worker_scheduler_pid_active(self): '''The worker works as scheduler too if RQ Scheduler not installed, and the pid scheduler_pid is correct''' test_queue = 'worker_scheduler_active_test' queues = [{ 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': test_queue, }] with patch('rq.scheduler.RQScheduler.release_locks') as mock_release_locks: with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): queue = get_queue(test_queue) worker = get_worker(test_queue, name=uuid4().hex) worker.work(with_scheduler=True, burst=True) # force the worker to acquire a scheduler lock pid = get_scheduler_pid(queue) self.assertIsNotNone(pid) self.assertIsNot(pid, False) self.assertIsInstance(pid, int) @skipIf(RQ_SCHEDULER_INSTALLED is True, 'RQ Scheduler installed (no worker--with-scheduler)') def test_worker_scheduler_pid_inactive(self): '''The worker works as scheduler too if RQ Scheduler not installed, and the pid scheduler_pid is correct''' test_queue = 'worker_scheduler_inactive_test' queues = [{ 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': test_queue, }] with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): worker = get_worker(test_queue, name=uuid4().hex) worker.work(with_scheduler=False, burst=True) # worker will not acquire lock, scheduler_pid should return None self.assertIsNone(get_scheduler_pid(worker.queues[0])) class UtilsTest(TestCase): def test_get_statistics(self): """get_statistics() returns the right number of workers""" queues = [ { 'connection_config': { 'DB': 0, 'HOST': 'localhost', 'PORT': 6379, }, 'name': 'async', } ] with patch('django_rq.utils.QUEUES_LIST', new_callable=PropertyMock(return_value=queues)): worker = get_worker('async', name=uuid4().hex) 
worker.register_birth() statistics = get_statistics() data = statistics['queues'][0] self.assertEqual(data['name'], 'async') self.assertEqual(data['workers'], 1) worker.register_death() def test_get_jobs(self): """get_jobs() works properly""" queue = get_queue('django_rq_test') registry = ScheduledJobRegistry(queue.name, queue.connection) flush_registry(registry) now = datetime.datetime.now() job = queue.enqueue_at(now, access_self) job2 = queue.enqueue_at(now, access_self) self.assertEqual(get_jobs(queue, [job.id, job2.id]), [job, job2]) self.assertEqual(len(registry), 2) # job has been deleted, so the result will be filtered out queue.connection.delete(job.key) self.assertEqual(get_jobs(queue, [job.id, job2.id]), [job2]) self.assertEqual(len(registry), 2) # If job has been deleted and `registry` is passed, # job will also be removed from registry queue.connection.delete(job2.key) self.assertEqual(get_jobs(queue, [job.id, job2.id], registry), []) self.assertEqual(len(registry), 0) django-rq-3.1/django_rq/migrations/0000775000175000017500000000000015043301563017241 5ustar carstencarstendjango-rq-3.1/django_rq/migrations/__init__.py0000664000175000017500000000000015043301563021340 0ustar carstencarstendjango-rq-3.1/django_rq/migrations/0001_initial.py0000664000175000017500000000170715043301563021711 0ustar carstencarstenfrom django.db import migrations, models class Migration(migrations.Migration): """Create Django contenttype for queue""" initial = True dependencies = [] operations = [ migrations.CreateModel( name='Queue', fields=[ # Does not create any table / fields in the database # Registers the Queue model as migrated ( 'id', models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name='ID' ) ), ], options={ # Enables the Django contenttype framework for django_rq 'permissions': [['view', 'Access admin page']], 'managed': False, 'default_permissions': (), }, ), ] django-rq-3.1/django_rq/models.py0000664000175000017500000000123315043301563016721 0ustar carstencarstenfrom django.core.signals import got_request_exception, request_finished from django.db import models from . import thread_queue from .queues import get_commit_mode # If we're not in AUTOCOMMIT mode, wire up request finished/exception signal if not get_commit_mode(): request_finished.connect(thread_queue.commit) got_request_exception.connect(thread_queue.clear) class Queue(models.Model): """Placeholder model with no database table, but with django admin page and contenttype permission""" class Meta: managed = False # not in Django's database default_permissions = () permissions = [['view', 'Access admin page']] django-rq-3.1/django_rq/management/0000775000175000017500000000000015043301563017201 5ustar carstencarstendjango-rq-3.1/django_rq/management/__init__.py0000664000175000017500000000000015043301563021300 0ustar carstencarstendjango-rq-3.1/django_rq/management/commands/0000775000175000017500000000000015043301563021002 5ustar carstencarstendjango-rq-3.1/django_rq/management/commands/rqsuspend.py0000664000175000017500000000222515043301563023401 0ustar carstencarstenimport logging import sys from django.core.management.base import BaseCommand from rq.suspension import suspend from ...queues import get_connection log = logging.getLogger(__name__) class Command(BaseCommand): help = "Suspend all queues." def add_arguments(self, parser): parser.add_argument( "--duration", "-d", type=int, help="The duration in seconds to suspend the workers. 
If not provided, workers will be suspended indefinitely", ) def handle(self, *args, **options): connection = get_connection() duration = options.get("duration") if duration is not None and duration < 1: log.error("Duration must be an integer greater than 1") sys.exit(1) if duration: suspend(connection, duration) msg = f"Suspending workers for {duration} seconds. No new jobs will be started during that time, but then will automatically resume" log.info(msg) else: suspend(connection) log.info("Suspending workers. No new jobs will be started. But current jobs will be completed") django-rq-3.1/django_rq/management/commands/__init__.py0000664000175000017500000000000015043301563023101 0ustar carstencarstendjango-rq-3.1/django_rq/management/commands/rqworker.py0000664000175000017500000001112615043301563023231 0ustar carstencarstenimport os import sys from redis.exceptions import ConnectionError from rq.logutils import setup_loghandlers from django.core.management.base import BaseCommand from ...workers import get_worker from ...utils import configure_sentry, reset_db_connections class Command(BaseCommand): """ Runs RQ workers on specified queues. Note that all queues passed into a single rqworker command must share the same connection. Example usage: python manage.py rqworker high medium low """ args = '' def add_arguments(self, parser): parser.add_argument('--worker-class', action='store', dest='worker_class', help='RQ Worker class to use') parser.add_argument('--pid', action='store', dest='pid', default=None, help='PID file to write the worker`s pid into') parser.add_argument('--burst', action='store_true', dest='burst', default=False, help='Run worker in burst mode') parser.add_argument('--with-scheduler', action='store_true', dest='with_scheduler', default=False, help='Run worker with scheduler enabled') parser.add_argument('--name', action='store', dest='name', default=None, help='Name of the worker') parser.add_argument('--queue-class', action='store', dest='queue_class', help='Queues class to use') parser.add_argument('--job-class', action='store', dest='job_class', help='Jobs class to use') parser.add_argument('--worker-ttl', action='store', type=int, dest='worker_ttl', default=420, help='Default worker timeout to be used') parser.add_argument('--sentry-dsn', action='store', default=None, dest='sentry_dsn', help='Report exceptions to this Sentry DSN') parser.add_argument('--sentry-ca-certs', action='store', default=None, dest='sentry_ca_certs', help='A path to an alternative CA bundle file in PEM-format') parser.add_argument('--sentry-debug', action='store', default=False, dest='sentry_debug', help='Turns debug mode on or off.') parser.add_argument('--max-jobs', action='store', default=None, dest='max_jobs', type=int, help='Maximum number of jobs to execute') parser.add_argument('--max-idle-time', action='store', default=None, dest='max_idle_time', type=int, help='Seconds to wait for job before shutting down') parser.add_argument('--serializer', action='store', default='rq.serializers.DefaultSerializer', dest='serializer', help='Specify a custom Serializer.') parser.add_argument('args', nargs='*', type=str, help='The queues to work on, separated by space') def handle(self, *args, **options): pid = options.get('pid') if pid: with open(os.path.expanduser(pid), "w") as fp: fp.write(str(os.getpid())) # Verbosity is defined by default in BaseCommand for all commands verbosity = options['verbosity'] if verbosity >= 2: level = 'DEBUG' elif verbosity == 0: level = 'WARNING' else: level = 'INFO' 
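# A minimal sketch of suspending and resuming workers programmatically,
# mirroring what the rqsuspend command above does with a duration.
from rq.suspension import is_suspended, resume, suspend

from django_rq.queues import get_connection

connection = get_connection('default')
suspend(connection, 60)          # no new jobs are picked up for 60 seconds
assert is_suspended(connection)
resume(connection)               # lift the suspension early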
setup_loghandlers(level) sentry_dsn = options.pop('sentry_dsn') if sentry_dsn: try: configure_sentry(sentry_dsn, **options) except ImportError: self.stderr.write("Please install sentry-sdk using `pip install sentry-sdk`") sys.exit(1) try: # Instantiate a worker worker_kwargs = { 'worker_class': options['worker_class'], 'queue_class': options['queue_class'], 'job_class': options['job_class'], 'name': options['name'], 'worker_ttl': options['worker_ttl'], 'serializer': options['serializer'] } w = get_worker(*args, **worker_kwargs) # Close any opened DB connection before any fork reset_db_connections() w.work( burst=options.get('burst', False), with_scheduler=options.get('with_scheduler', False), logging_level=level, max_jobs=options['max_jobs'], max_idle_time=options['max_idle_time'] ) except ConnectionError as e: self.stderr.write(str(e)) sys.exit(1) django-rq-3.1/django_rq/management/commands/rqscheduler.py0000664000175000017500000000324015043301563023674 0ustar carstencarstenimport os from django.core.exceptions import ImproperlyConfigured from django.core.management.base import BaseCommand from ... import get_scheduler try: from rq_scheduler.utils import setup_loghandlers except ImportError: def setup_loghandlers(*args, **kwargs): raise ImproperlyConfigured('rq_scheduler not installed') class Command(BaseCommand): """ Runs RQ scheduler """ help = __doc__ args = '' def add_arguments(self, parser): parser.add_argument('--pid', action='store', dest='pid', default=None, help='PID file to write the scheduler`s pid into') parser.add_argument('--interval', '-i', type=int, dest='interval', default=60, help="""How often the scheduler checks for new jobs to add to the queue (in seconds).""") parser.add_argument('--queue', dest='queue', default='default', help="Name of the queue used for scheduling.",) parser.add_argument('args', nargs='*') def handle(self, *args, **options): pid = options.get('pid') if pid: with open(os.path.expanduser(pid), "w") as fp: fp.write(str(os.getpid())) # Verbosity is defined by default in BaseCommand for all commands verbosity: int = options['verbosity'] if verbosity >= 2: level = 'DEBUG' elif verbosity == 0: level = 'WARNING' else: level = 'INFO' setup_loghandlers(level) scheduler = get_scheduler( name=options['queue'], interval=options['interval']) scheduler.run() django-rq-3.1/django_rq/management/commands/rqenqueue.py0000664000175000017500000000173015043301563023367 0ustar carstencarstenfrom django.core.management.base import BaseCommand from ... import get_queue class Command(BaseCommand): """ Queue a function with the given arguments. """ help = __doc__ args = '' def add_arguments(self, parser): parser.add_argument('--queue', '-q', dest='queue', default='default', help='Specify the queue [default]') parser.add_argument('--timeout', '-t', type=int, dest='timeout', help='A timeout in seconds') parser.add_argument('args', nargs='*') def handle(self, *args, **options): """ Queues the function given with the first argument with the parameters given with the rest of the argument list. """ queue = get_queue(options['queue']) job = queue.enqueue_call(args[0], args=args[1:], timeout=options['timeout']) if options['verbosity']: print('Job %s created' % job.id) django-rq-3.1/django_rq/management/commands/rqresume.py0000664000175000017500000000044315043301563023220 0ustar carstencarstenfrom django.core.management.base import BaseCommand from rq.suspension import resume from ...queues import get_connection class Command(BaseCommand): help = "Resume all queues." 
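# A hedged sketch of driving the management commands above from code via
# call_command(); the shell equivalents are shown in comments, and the dotted
# function path passed to rqenqueue is only an example.
from django.core.management import call_command

# python manage.py rqworker default --burst --with-scheduler
call_command('rqworker', 'default', burst=True, with_scheduler=True)

# python manage.py rqenqueue --queue default django_rq.tests.fixtures.access_self
call_command('rqenqueue', 'django_rq.tests.fixtures.access_self', queue='default')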
def handle(self, *args, **options): connection = get_connection() resume(connection) django-rq-3.1/django_rq/management/commands/rqworker-pool.py0000664000175000017500000001077415043301563024210 0ustar carstencarstenimport os import sys from rq.serializers import resolve_serializer from rq.worker_pool import WorkerPool from rq.logutils import setup_loghandlers from typing import cast from django.core.management.base import BaseCommand from ...jobs import get_job_class from ...utils import configure_sentry from ...queues import get_queues from ...workers import get_worker_class class Command(BaseCommand): """ Runs RQ pool with x number of workers on specified queues. Note that all queues passed into a single rqworker-pool command must share the same connection. Example usage: python manage.py rqworker-pool high medium low --num-workers 4 """ args = '' def add_arguments(self, parser): parser.add_argument('--num-workers', action='store', dest='num_workers', type=int, default=1, help='Number of workers to spawn') parser.add_argument('--worker-class', action='store', dest='worker_class', help='RQ Worker class to use') parser.add_argument('--pid', action='store', dest='pid', default=None, help='PID file to write the worker`s pid into') parser.add_argument('--burst', action='store_true', dest='burst', default=False, help='Run worker in burst mode') parser.add_argument('--queue-class', action='store', dest='queue_class', help='Queues class to use') parser.add_argument('--job-class', action='store', dest='job_class', help='Jobs class to use') parser.add_argument('--serializer', action='store', default='rq.serializers.DefaultSerializer', dest='serializer', help='Specify a custom Serializer.') parser.add_argument('args', nargs='*', type=str, help='The queues to work on, separated by space') # Args present in `rqworker` command but not yet implemented here # parser.add_argument('--worker-ttl', action='store', type=int, # dest='worker_ttl', default=420, # help='Default worker timeout to be used') # parser.add_argument('--max-jobs', action='store', default=None, dest='max_jobs', type=int, # help='Maximum number of jobs to execute') # parser.add_argument('--with-scheduler', action='store_true', dest='with_scheduler', # default=False, help='Run worker with scheduler enabled') # Sentry arguments parser.add_argument('--sentry-dsn', action='store', default=None, dest='sentry_dsn', help='Report exceptions to this Sentry DSN') parser.add_argument('--sentry-ca-certs', action='store', default=None, dest='sentry_ca_certs', help='A path to an alternative CA bundle file in PEM-format') parser.add_argument('--sentry-debug', action='store', default=False, dest='sentry_debug', help='Turns debug mode on or off.') def handle(self, *args, **options): pid = options.get('pid') if pid: with open(os.path.expanduser(pid), "w") as fp: fp.write(str(os.getpid())) # Verbosity is defined by default in BaseCommand for all commands verbosity: int = options['verbosity'] if verbosity >= 2: logging_level = 'DEBUG' elif verbosity == 0: logging_level = 'WARNING' else: logging_level = 'INFO' setup_loghandlers(logging_level) sentry_dsn = options.pop('sentry_dsn') if sentry_dsn: try: configure_sentry(sentry_dsn, **options) except ImportError: self.stderr.write("Please install sentry-sdk using `pip install sentry-sdk`") sys.exit(1) job_class = get_job_class(options['job_class']) queues = get_queues(*args, **{'job_class': job_class, 'queue_class': options['queue_class']}) worker_class = get_worker_class(options.get('worker_class', None)) 
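# A minimal sketch of the WorkerPool wiring this command performs, reduced
# to its essentials; serializer, worker_class and job_class fall back to
# their rq defaults here.
from rq.worker_pool import WorkerPool

from django_rq.queues import get_queues

queues = get_queues('default')
pool = WorkerPool(queues=queues, connection=queues[0].connection, num_workers=2)
pool.start(burst=True)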
serializer = resolve_serializer(options['serializer']) pool = WorkerPool( queues=queues, connection=queues[0].connection, num_workers=options['num_workers'], serializer=serializer, worker_class=worker_class, job_class=job_class, ) pool.start(burst=options.get('burst', False), logging_level=logging_level) django-rq-3.1/django_rq/management/commands/rqstats.py0000664000175000017500000000611315043301563023056 0ustar carstencarstenimport click import time from django.core.management.base import BaseCommand, CommandError from ...utils import get_statistics class Command(BaseCommand): """ Print RQ statistics """ help = __doc__ _separator: str def add_arguments(self, parser): # TODO: convert this to @click.command like rq does parser.add_argument( '-j', '--json', action='store_true', dest='json', help='Output statistics as JSON', ) parser.add_argument( '-y', '--yaml', action='store_true', dest='yaml', help='Output statistics as YAML', ) parser.add_argument( '-i', '--interval', dest='interval', type=float, help='Poll statistics every N seconds', ) def _print_separator(self): try: click.echo(self._separator) except AttributeError: self._separator = "-" * self.table_width click.echo(self._separator) def _print_stats_dashboard(self, statistics): if self.interval: click.clear() click.echo() click.echo("Django RQ CLI Dashboard") click.echo() self._print_separator() # Header click.echo( """| %-15s|%10s |%10s |%10s |%10s |%10s |%10s |""" % ("Name", "Queued", "Active", "Deferred", "Finished", "Failed", "Workers") ) self._print_separator() # Print every queues in a row for queue in statistics["queues"]: click.echo( """| %-15s|%10s |%10s |%10s |%10s |%10s |%10s |""" % (queue["name"], queue["jobs"], queue["started_jobs"], queue["deferred_jobs"], queue["finished_jobs"],queue["failed_jobs"], queue["workers"]) ) self._print_separator() if self.interval: click.echo() click.echo("Press 'Ctrl+c' to quit") def handle(self, *args, **options): if options.get("json"): import json click.echo(json.dumps(get_statistics())) return if options.get("yaml"): try: import yaml except ImportError as ex: raise CommandError("PyYAML is not installed.") from ex # Disable YAML alias yaml.Dumper.ignore_aliases = lambda *args: True # type: ignore[method-assign] click.echo(yaml.dump(get_statistics(), default_flow_style=False)) return self.interval = options.get("interval") # Arbitrary self.table_width = 90 # Do not continuously poll if not self.interval: self._print_stats_dashboard(get_statistics()) return # Abuse clicks to 'live' render CLI dashboard TODO: Use curses instead try: while True: self._print_stats_dashboard(get_statistics()) time.sleep(self.interval) except KeyboardInterrupt: pass django-rq-3.1/django_rq/templates/0000775000175000017500000000000015043301563017063 5ustar carstencarstendjango-rq-3.1/django_rq/templates/django_rq/0000775000175000017500000000000015043301563021027 5ustar carstencarstendjango-rq-3.1/django_rq/templates/django_rq/jobs.html0000664000175000017500000002024215043301563022652 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}{{ job_status }} Jobs in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}
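# Usage notes for the rqstats command above (shell examples):
#   python manage.py rqstats                 # one-shot statistics table
#   python manage.py rqstats --interval 1    # redraw the dashboard every second
#   python manage.py rqstats --json          # machine-readable JSON output
#   python manage.py rqstats --yaml          # YAML output (requires PyYAML)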

{{ job_status }} jobs in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% if job_status == 'Scheduled' %} {% endif %} {% block extra_columns %} {% endblock extra_columns %} {% for job in jobs %} {% if job_status == 'Scheduled' %} {% endif %} {% block extra_columns_values %} {% endblock extra_columns_values %} {% endfor %}
ID
Created
Scheduled
Enqueued
Ended
Status
Callable
{{ job.id }} {% if job.created_at %} {{ job.created_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.scheduled_at %} {{ job.scheduled_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.enqueued_at %} {{ job.enqueued_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.ended_at %} {{ job.ended_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ job.get_status.value }} {{ job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/templates/django_rq/requeue_all.html0000664000175000017500000000220415043301563024216 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Are you sure?

{% endblock %} {% block content %}

Are you sure you want to requeue {{ total_jobs }} job{{ total_jobs|pluralize }} in the {{ queue.name }} queue? This action cannot be undone.

{% csrf_token %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/clear_failed_queue.html0000664000175000017500000000221115043301563025507 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Are you sure?

{% endblock %} {% block content %}

Are you sure you want to delete {{ total_jobs }} failed job{{ total_jobs|pluralize }} in the {{ queue.name }} queue? This action cannot be undone.

{% csrf_token %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/clear_queue.html0000664000175000017500000000211215043301563024203 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Are you sure?

{% endblock %} {% block content %}

Are you sure you want to clear the queue {{ queue.name }}? This action cannot be undone.

{% csrf_token %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/scheduler.html0000664000175000017500000001176715043301563023707 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}Scheduler Jobs in {{ scheduler.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Scheduler Managed Jobs

{% endblock %} {% block content %}
{% csrf_token %}
{% for job in jobs %} {% endfor %}
ID
Schedule
Next Run
Last Ended
Last Status
Callable
{{ job.id }} {{ job.schedule }} {% if job.next_run %} {{ job.next_run|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.ended_at %} {{ job.ended_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ job.get_status.value }} {{ job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/templates/django_rq/scheduled_jobs.html0000664000175000017500000001654315043301563024703 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}{{ job_status }} Jobs in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Scheduled jobs in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% block extra_columns %} {% endblock extra_columns %} {% for job in jobs %} {% block extra_columns_values %} {% endblock extra_columns_values %} {% endfor %}
ID
Created
{% if sort_direction == 'ascending' %} {% else %} {% endif %}
{% if sort_direction == 'ascending' %} {% else %} {% endif %} Scheduled
Status
Callable
{{ job.id }} {% if job.created_at %} {{ job.created_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.scheduled_at %} {{ job.scheduled_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ job.get_status.value }} {{ job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/templates/django_rq/deferred_jobs.html0000664000175000017500000001547515043301563024526 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}{{ job_status }} Jobs in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Deferred jobs in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% block extra_columns %} {% endblock extra_columns %} {% for job in jobs %} {% block extra_columns_values %} {% endblock extra_columns_values %} {% endfor %}
ID
{% if sort_direction == 'ascending' %} {% else %} {% endif %}
{% if sort_direction == 'ascending' %} {% else %} {% endif %} Created
Status
Callable
{{ job.id }} {% if job.created_at %} {{ job.created_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ job.get_status.value }} {{ job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/templates/django_rq/started_job_registry.html0000664000175000017500000001500115043301563026142 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}{{ job_status }} Job Executions in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Job Executions in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% block extra_columns %} {% endblock extra_columns %} {% for execution in executions %} {% block extra_columns_values %} {% endblock extra_columns_values %} {% endfor %}
Execution ID
Created
Last Heartbeat
Enqueued
Callable
{{ execution.id }} {% if execution.created_at %} {{ execution.created_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if execution.last_heartbeat %} {{ execution.last_heartbeat|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if execution.job.enqueued_at %} {{ execution.job.enqueued_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ execution.job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/templates/django_rq/stats.html0000664000175000017500000002033615043301563023057 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block title %}Queues{% endblock %} {% block content_title %}

Queues

{% endblock %} {% block breadcrumbs %} {% endblock %} {% block content %}
{% csrf_token %}
{% if queue.scheduler_pid is not False %} {% endif%} {% for queue in queues %} {% if queue.scheduler_pid is not False %} {% endif %} {% endfor %}
Name
Queued Jobs
Oldest Queued Job
Active Jobs
Deferred Jobs
Finished Jobs
Failed Jobs
Scheduled Jobs
Workers
Host
Port
DB
Scheduler PID
{{ queue.name }} {{ queue.jobs }} {{ queue.oldest_job_timestamp }} {{ queue.started_jobs }} {{ queue.deferred_jobs }} {{ queue.finished_jobs }} {{ queue.failed_jobs }} {{ queue.scheduled_jobs }} {{ queue.workers }} {{ queue.connection_kwargs.host }} {{ queue.connection_kwargs.port }} {{ queue.connection_kwargs.db }}{{ queue.scheduler_pid|default_if_none:"Inactive" }}

View as JSON

{% if view_metrics %}

View Metrics

{% endif %}
{% if schedulers %}

RQ Scheduler

{% for connection, scheduler in schedulers.items %} {% endfor %}
Redis Connection Recurring Jobs
{{ connection }} {{ scheduler.count }}
{% endif %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/test.html0000664000175000017500000000000315043301563022665 0ustar carstencarstenFoodjango-rq-3.1/django_rq/templates/django_rq/delete_job.html0000664000175000017500000000235415043301563024015 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Are you sure?

{% endblock %} {% block content %}

Are you sure you want to delete {{ job.id }} from {{ queue.name }}? This action cannot be undone.

{% csrf_token %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/confirm_action.html0000664000175000017500000000315215043301563024710 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Are you sure?

{% endblock %} {% block content %}

Are you sure you want to {{ action|capfirst }} the selected jobs from {{ queue.name }}? The following jobs will be affected:

{% csrf_token %}
{% for job_id in job_ids %} {% endfor %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/job_detail.html0000664000175000017500000002401415043301563024012 0ustar carstencarsten{% extends "admin/change_list.html" %} {% load static django_rq %} {% block title %}Job {{ job.id }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Job {{ job.id }}

{% endblock %} {# do not render object-tools (fix until https://github.com/django/django/pull/19389/files is released) #} {% block object-tools %}{% endblock %} {% block content %}
{{ job.origin }}
{{ job.timeout }}
{{ job.result_ttl }}
{% if job.created_at %}
{{ job.created_at|to_localtime|date:"Y-m-d, H:i:s" }}
{% endif %} {% if job.enqueued_at %}
{{ job.enqueued_at|to_localtime|date:"Y-m-d, H:i:s" }}
{% endif %} {% if job.started_at %}
{{ job.started_at|to_localtime|date:"Y-m-d, H:i:s" }}
{% endif %} {% if job.ended_at %}
{{ job.ended_at|to_localtime|date:"Y-m-d, H:i:s" }}
{% endif %}
{{ job.get_status.value }}
{{ job|show_func_name }}
{{ job.meta }}
{% if data_is_valid %} {% if job.args %}
    {% for arg in job.args %}
  • {{ arg|force_escape }}
  • {% endfor %}
{% endif %} {% else %} Unpickling Error {% endif %}
{% if data_is_valid %} {% if job.kwargs %}
    {% for key, value in job.kwargs|items %}
  • {{ key }}: {{ value|force_escape }}
  • {% endfor %}
{% endif %} {% else %} Unpickling Error {% endif %}
{% if dependencies %}
{% for dependency in dependencies %} {% if not forloop.first %}
{% endif %} {% if dependency.1 %} {{ dependency.1.func_name }} {% else %} Deleted {% endif %} ({{ dependency.0 }}) {% endfor %}
{% endif %} {% if exc_info %}
{% if job.exc_info %}{{ job.exc_info|linebreaks }}{% endif %}
{% endif %} {% if job.legacy_result %}
{{ job.result }}
{% endif %}
{% if job.is_started %}
{% csrf_token %}
{% endif %} {% if job.is_failed %}
{% csrf_token %}
{% endif %} {% if not job.is_queued and not job.is_failed %}
{% csrf_token %}
{% endif %}
{% for result in job.results %}

Result {{ result.id }}

{% endfor %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/failed_jobs.html0000664000175000017500000002041115043301563024154 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}Failed Jobs in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

{{ job_status }} jobs in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% block extra_columns %} {% endblock extra_columns %} {% for job in jobs %} {% if job_status == 'Scheduled' %} {% endif %} {% block extra_columns_values %} {% endblock extra_columns_values %} {% endfor %}
ID
Created
Enqueued
{% if sort_direction == 'ascending' %} {% else %} {% endif %}
{% if sort_direction == 'ascending' %} {% else %} {% endif %} Ended
Status
Callable
{{ job.id }} {% if job.created_at %} {{ job.created_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.scheduled_at %} {{ job.scheduled_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.enqueued_at %} {{ job.enqueued_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.ended_at %} {{ job.ended_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ job.get_status.value }} {{ job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/templates/django_rq/worker_details.html0000664000175000017500000000643115043301563024737 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static django_rq %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Worker Info

{% endblock %} {% block content %}
{{ worker.name }}
{{ worker.get_state }}
{{ worker.birth_date|to_localtime|date:"Y-m-d, H:i:s" }}
{{ queue_names }}
{{ worker.pid }}
{% if job %}
{{ job.func_name }} ({{ job.id }})
{% endif %} {% if worker.successful_job_count != None %}
{{ worker.successful_job_count }}
{% endif %} {% if worker.failed_job_count != None %}
{{ worker.failed_job_count }}
{% endif %} {% if worker.total_working_time != None %}
{{ total_working_time }}
{% endif %}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/workers.html0000664000175000017500000000546715043301563023425 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq l10n %} {% block title %}Workers in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

Workers in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% for worker in workers %} {% endfor %}
Name
State
Birth
PID
{{ worker.name }} {{ worker.get_state }} {{ worker.birth_date|to_localtime|date:"Y-m-d, H:i:s" }} {{ worker.pid|unlocalize }}
{% endblock %} django-rq-3.1/django_rq/templates/django_rq/finished_jobs.html0000664000175000017500000002041315043301563024523 0ustar carstencarsten{% extends "admin/base_site.html" %} {% load static jquery_path django_rq %} {% block title %}Finished Jobs in {{ queue.name }} {{ block.super }}{% endblock %} {% block extrastyle %} {{ block.super }} {% endblock %} {% block extrahead %} {{ block.super }} {% endblock %} {% block breadcrumbs %} {% endblock %} {% block content_title %}

{{ job_status }} jobs in {{ queue.name }}

{% endblock %} {% block content %}
{% csrf_token %}
{% block extra_columns %} {% endblock extra_columns %} {% for job in jobs %} {% if job_status == 'Scheduled' %} {% endif %} {% block extra_columns_values %} {% endblock extra_columns_values %} {% endfor %}
ID
Created
Enqueued
{% if sort_direction == 'ascending' %} {% else %} {% endif %}
{% if sort_direction == 'ascending' %} {% else %} {% endif %} Ended
Status
Callable
{{ job.id }} {% if job.created_at %} {{ job.created_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.scheduled_at %} {{ job.scheduled_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.enqueued_at %} {{ job.enqueued_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {% if job.ended_at %} {{ job.ended_at|to_localtime|date:"Y-m-d, H:i:s" }} {% endif %} {{ job.get_status.value }} {{ job|show_func_name }}

{% for p in page_range %} {% if p == page %} {{ p }} {% elif forloop.last %} {{ p }} {% else %} {{ p }} {% endif %} {% endfor %} {{ num_jobs }} jobs

{% endblock %} django-rq-3.1/django_rq/thread_queue.py0000664000175000017500000000217415043301563020116 0ustar carstencarstenimport threading _thread_data = threading.local() def get_queue(): """ Returns a temporary queue to store jobs before they're committed later in the request/response cycle. Each job is stored as a tuple containing the queue, args and kwargs. For example, if we call ``queue.enqueue_call(foo, kwargs={'bar': 'baz'})`` during the request/response cycle, job_queue will look like: job_queue = [(default_queue, foo, {'kwargs': {'bar': 'baz'}})] This implementation is heavily inspired by https://github.com/chrisdoble/django-celery-transactions """ return _thread_data.__dict__.setdefault("job_queue", []) def add(queue, args, kwargs): get_queue().append((queue, args, kwargs)) def commit(*args, **kwargs): """ Processes all jobs in the delayed queue. """ delayed_queue = get_queue() try: while delayed_queue: queue, args, kwargs = delayed_queue.pop(0) queue.original_enqueue_call(*args, **kwargs) finally: clear() def clear(*args, **kwargs): try: del _thread_data.job_queue except AttributeError: pass django-rq-3.1/django_rq/jobs.py0000664000175000017500000000101415043301563016370 0ustar carstencarstenfrom rq.job import Job from rq.utils import import_attribute from django.conf import settings def get_job_class(job_class=None): """ Return the job class from RQ settings, otherwise return Job. If `job_class` is not None, it is used as an override (it can be a Python import path given as a string). """ RQ = getattr(settings, 'RQ', {}) if job_class is None: job_class = RQ.get('JOB_CLASS', Job) if isinstance(job_class, str): job_class = import_attribute(job_class) return job_class django-rq-3.1/django_rq/templatetags/0000775000175000017500000000000015043301563017557 5ustar carstencarstendjango-rq-3.1/django_rq/templatetags/__init__.py0000664000175000017500000000000015043301563021656 0ustar carstencarstendjango-rq-3.1/django_rq/templatetags/jquery_path.py0000664000175000017500000000023015043301563022461 0ustar carstencarstenfrom django import template register = template.Library() @register.simple_tag def get_jquery_path(): return 'admin/js/vendor/jquery/jquery.js' django-rq-3.1/django_rq/templatetags/django_rq.py0000664000175000017500000000154415043301563022101 0ustar carstencarstenimport datetime from django import template from django.utils import timezone from django.utils.html import escape register = template.Library() @register.filter def to_localtime(time): """Converts a naive datetime to localtime based on settings""" utc_time = time.replace(tzinfo=datetime.timezone.utc) to_zone = timezone.get_default_timezone() return utc_time.astimezone(to_zone) @register.filter def show_func_name(job): """Shows job.func_name and handles errors during deserialization""" try: return job.func_name except Exception as e: return repr(e) @register.filter def force_escape(text): return escape(text) @register.filter def items(dictionary): """ Explicitly calls the `dictionary.items` method to prevent Django from accessing the key `items` if the dictionary defines one.
""" return dictionary.items() django-rq-3.1/django_rq/workers.py0000664000175000017500000000351415043301563017136 0ustar carstencarstenfrom typing import Optional, Type, Union from rq import Worker from rq.job import Job from rq.utils import import_attribute from django.conf import settings from .jobs import get_job_class from .queues import DjangoRQ, get_queues def get_exception_handlers(): """ Custom exception handlers could be defined in settings.py: RQ = { 'EXCEPTION_HANDLERS': ['path.to.handler'], } """ from .settings import EXCEPTION_HANDLERS return [import_attribute(path) for path in EXCEPTION_HANDLERS] def get_worker_class(worker_class=None): """ Return worker class from RQ settings, otherwise return Worker. If `worker_class` is not None, it is used as an override (can be python import path as string). """ RQ = getattr(settings, 'RQ', {}) if worker_class is None: worker_class = Worker if 'WORKER_CLASS' in RQ: worker_class = RQ.get('WORKER_CLASS') if isinstance(worker_class, str): worker_class = import_attribute(worker_class) return worker_class def get_worker( *queue_names: str, job_class: Optional[Union[str, Type[Job]]] = None, queue_class: Optional[Union[str, Type[DjangoRQ]]] = None, worker_class: Optional[Union[str, Type[Worker]]] = None, **kwargs, ) -> Worker: """ Returns a RQ worker for all queues or specified ones. """ job_class = get_job_class(job_class) queues = get_queues(*queue_names, job_class=job_class, queue_class=queue_class) # normalize queue_class to what get_queues returns queue_class = queues[0].__class__ worker_class = get_worker_class(worker_class) return worker_class( queues, connection=queues[0].connection, exception_handlers=get_exception_handlers() or None, job_class=job_class, queue_class=queue_class, **kwargs ) django-rq-3.1/django_rq/queues.py0000664000175000017500000002772615043301563016764 0ustar carstencarstenimport warnings from typing import Any, Callable, Optional, Type, Union import redis from redis.sentinel import Sentinel from rq.job import Job from rq.queue import Queue from rq.utils import import_attribute from django.conf import settings from django.core.exceptions import ImproperlyConfigured from . import thread_queue from .jobs import get_job_class def get_commit_mode(): """ Disabling AUTOCOMMIT causes enqueued jobs to be stored in a temporary queue. Jobs in this queue are only enqueued after the request is completed and are discarded if the request causes an exception (similar to db transactions). To disable autocommit, put this in settings.py: RQ = { 'AUTOCOMMIT': False, } """ RQ = getattr(settings, 'RQ', {}) return RQ.get('AUTOCOMMIT', True) def get_queue_class(config=None, queue_class=None): """ Return queue class from config or from RQ settings, otherwise return DjangoRQ. If ``queue_class`` is provided, it takes priority. The full priority list for queue class sources: 1. ``queue_class`` argument 2. ``QUEUE_CLASS`` in ``config`` argument 3. ``QUEUE_CLASS`` in base settings (``RQ``) """ RQ = getattr(settings, 'RQ', {}) if queue_class is None: queue_class = RQ.get('QUEUE_CLASS', DjangoRQ) if config: queue_class = config.get('QUEUE_CLASS', queue_class) if isinstance(queue_class, str): queue_class = import_attribute(queue_class) return queue_class class DjangoRQ(Queue): """ A subclass of RQ's QUEUE that allows jobs to be stored temporarily to be enqueued later at the end of Django's request/response cycle. 
""" def __init__(self, *args, **kwargs): autocommit = kwargs.pop('autocommit', None) self._autocommit = get_commit_mode() if autocommit is None else autocommit super(DjangoRQ, self).__init__(*args, **kwargs) def original_enqueue_call(self, *args, **kwargs): from .settings import QUEUES queue_name = kwargs.get('queue_name') or self.name kwargs['result_ttl'] = kwargs.get('result_ttl', get_result_ttl(queue_name)) return super(DjangoRQ, self).enqueue_call(*args, **kwargs) def enqueue_call(self, *args, **kwargs): if self._autocommit: return self.original_enqueue_call(*args, **kwargs) else: thread_queue.add(self, args, kwargs) def get_redis_connection(config, use_strict_redis=False): """ Returns a redis connection from a connection config """ redis_cls = redis.StrictRedis if use_strict_redis else redis.Redis if 'URL' in config: if config.get('SSL') or config.get('URL').startswith('rediss://'): return redis_cls.from_url( config['URL'], db=config.get('DB'), ssl_cert_reqs=config.get('SSL_CERT_REQS', 'required'), ) else: return redis_cls.from_url( config['URL'], db=config.get('DB'), ) if 'USE_REDIS_CACHE' in config.keys(): try: # Assume that we're using django-redis from django_redis import get_redis_connection as get_redis return get_redis(config['USE_REDIS_CACHE']) except ImportError: pass from django.core.cache import caches cache = caches[config['USE_REDIS_CACHE']] # We're using django-redis-cache try: return cache._client # type: ignore[attr-defined] except AttributeError: # For django-redis-cache > 0.13.1 return cache.get_master_client() # type: ignore[attr-defined] if 'UNIX_SOCKET_PATH' in config: return redis_cls(unix_socket_path=config['UNIX_SOCKET_PATH'], db=config['DB']) if 'SENTINELS' in config: connection_kwargs = { 'db': config.get('DB'), 'password': config.get('PASSWORD'), 'username': config.get('USERNAME'), 'socket_timeout': config.get('SOCKET_TIMEOUT'), } connection_kwargs.update(config.get('CONNECTION_KWARGS', {})) sentinel_kwargs = config.get('SENTINEL_KWARGS', {}) sentinel = Sentinel(config['SENTINELS'], sentinel_kwargs=sentinel_kwargs, **connection_kwargs) return sentinel.master_for( service_name=config['MASTER_NAME'], redis_class=redis_cls, ) return redis_cls( host=config['HOST'], port=config['PORT'], db=config.get('DB', 0), username=config.get('USERNAME', None), password=config.get('PASSWORD'), ssl=config.get('SSL', False), ssl_cert_reqs=config.get('SSL_CERT_REQS', 'required'), **config.get('REDIS_CLIENT_KWARGS', {}) ) def get_connection( name: str = 'default', use_strict_redis: bool = False, ) -> redis.Redis: """ Returns a Redis connection to use based on parameters in settings.RQ_QUEUES """ from .settings import QUEUES return get_redis_connection(QUEUES[name], use_strict_redis) def get_queue( name: str = 'default', default_timeout: Optional[int] = None, is_async: Optional[bool] = None, autocommit: Optional[bool] = None, connection: Optional[redis.Redis] = None, queue_class: Optional[Union[str, Type[DjangoRQ]]] = None, job_class: Optional[Union[str, Type[Job]]] = None, serializer: Any = None, **kwargs: Any, ) -> DjangoRQ: """ Returns an rq Queue using parameters defined in ``RQ_QUEUES`` """ from .settings import QUEUES if kwargs.get('async') is not None: is_async = kwargs['async'] warnings.warn('The `async` keyword is deprecated. 
Use `is_async` instead', DeprecationWarning) # If is_async is provided, use it, otherwise, get it from the configuration if is_async is None: is_async = QUEUES[name].get('ASYNC', True) # same for job_class job_class = get_job_class(job_class) if default_timeout is None: default_timeout = QUEUES[name].get('DEFAULT_TIMEOUT') if connection is None: connection = get_connection(name) if serializer is None: serializer = QUEUES[name].get('SERIALIZER') queue_class = get_queue_class(QUEUES[name], queue_class) return queue_class( name, default_timeout=default_timeout, connection=connection, is_async=is_async, job_class=job_class, autocommit=autocommit, serializer=serializer, **kwargs ) def get_queue_by_index(index): """ Returns an rq Queue using parameters defined in ``QUEUES_LIST`` """ from .settings import QUEUES_LIST config = QUEUES_LIST[int(index)] return get_queue_class(config)( config['name'], connection=get_redis_connection(config['connection_config']), is_async=config.get('ASYNC', True), serializer=config['connection_config'].get('SERIALIZER') ) def get_scheduler_by_index(index): """ Returns an rq-scheduler Scheduler using parameters defined in ``QUEUES_LIST`` """ from .settings import QUEUES_LIST config = QUEUES_LIST[int(index)] return get_scheduler(config['name']) def filter_connection_params(queue_params): """ Filters the queue params to keep only the connection related params. """ CONNECTION_PARAMS = ( 'URL', 'DB', 'USE_REDIS_CACHE', 'UNIX_SOCKET_PATH', 'HOST', 'PORT', 'PASSWORD', 'SENTINELS', 'MASTER_NAME', 'SOCKET_TIMEOUT', 'SSL', 'CONNECTION_KWARGS', ) return {p: v for (p, v) in queue_params.items() if p in CONNECTION_PARAMS} def get_queues(*queue_names, **kwargs): """ Return queue instances from specified queue names. All instances must use the same Redis connection. """ from .settings import QUEUES if len(queue_names) <= 1: # Return "default" queue if no queue name is specified # or one queue with specified name return [get_queue(*queue_names, **kwargs)] # will return more than one queue # import job class only once for all queues kwargs['job_class'] = get_job_class(kwargs.pop('job_class', None)) queue_params = QUEUES[queue_names[0]] connection_params = filter_connection_params(queue_params) queues = [get_queue(queue_names[0], **kwargs)] # do consistency checks while building return list for name in queue_names[1:]: queue = get_queue(name, **kwargs) if type(queue) is not type(queues[0]): raise ValueError( 'Queues must have the same class. ' '"{0}" and "{1}" have ' 'different classes'.format(name, queue_names[0]) ) if connection_params != filter_connection_params(QUEUES[name]): raise ValueError( 'Queues must have the same Redis connection. ' '"{0}" and "{1}" have ' 'different connections'.format(name, queue_names[0]) ) queues.append(queue) return queues
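# Illustrative usage sketch (comments only, not part of the module): fetch
# several queues that share one Redis connection and enqueue on each. The
# names 'high' and 'default' are assumed to exist in settings.RQ_QUEUES:
#
#     queues = get_queues('high', 'default')
#     for queue in queues:
#         queue.enqueue(print, 'hello')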
def enqueue(func: Callable, *args, **kwargs) -> Job: """ A convenience function to put a job in the default queue. Usage:: from django_rq import enqueue enqueue(func, *args, **kwargs) """ return get_queue().enqueue(func, *args, **kwargs) def get_unique_connection_configs(config=None): """ Returns a list of unique Redis connection configs from the given config """ if config is None: from .settings import QUEUES config = QUEUES connection_configs = [] for key, value in config.items(): value = filter_connection_params(value) if value not in connection_configs: connection_configs.append(value) return connection_configs def get_result_ttl(name: str = 'default'): """ Returns the result ttl from RQ_QUEUES if found, otherwise from RQ """ from .settings import QUEUES RQ = getattr(settings, 'RQ', {}) return QUEUES[name].get('DEFAULT_RESULT_TTL', RQ.get('DEFAULT_RESULT_TTL')) """ If rq_scheduler is installed, provide a ``get_scheduler`` function that behaves like ``get_connection``, except that it returns a ``Scheduler`` instance instead of a ``Queue`` instance. """ try: from rq_scheduler import Scheduler class DjangoScheduler(Scheduler): """ Use settings ``DEFAULT_RESULT_TTL`` from ``RQ`` and ``DEFAULT_TIMEOUT`` from ``RQ_QUEUES`` if configured. """ def _create_job(self, *args, **kwargs): from .settings import QUEUES if kwargs.get('timeout') is None: queue_name = kwargs.get('queue_name') or self.queue_name kwargs['timeout'] = QUEUES[queue_name].get('DEFAULT_TIMEOUT') if kwargs.get('result_ttl') is None: kwargs['result_ttl'] = getattr(settings, 'RQ', {}).get('DEFAULT_RESULT_TTL') return super(DjangoScheduler, self)._create_job(*args, **kwargs) def get_scheduler( name: str = 'default', queue: Optional[DjangoRQ] = None, interval: int = 60, connection: Optional[redis.Redis] = None, ) -> DjangoScheduler: """ Returns an RQ Scheduler instance using parameters defined in ``RQ_QUEUES`` """ RQ = getattr(settings, 'RQ', {}) scheduler_class = RQ.get('SCHEDULER_CLASS', DjangoScheduler) if isinstance(scheduler_class, str): scheduler_class = import_attribute(scheduler_class) if connection is None: connection = get_connection(name) if queue is None: queue = get_queue(name, connection=connection) return scheduler_class( queue_name=name, interval=interval, queue=queue, job_class=queue.job_class, connection=connection ) except ImportError: def get_scheduler(*args, **kwargs): # type: ignore[misc] raise ImproperlyConfigured('rq_scheduler not installed') django-rq-3.1/.github/0000775000175000017500000000000015043301563014461 5ustar carstencarstendjango-rq-3.1/.github/FUNDING.yml0000664000175000017500000000062015043301563016274 0ustar carstencarsten# These are supported funding model platforms github: [selwin] patreon: # Replace with a single Patreon username open_collective: # Replace with a single Open Collective username ko_fi: # Replace with a single Ko-fi username tidelift: "pypi/django-rq" community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry custom: # Replace with a single custom sponsorship URL django-rq-3.1/.github/dependabot.yml0000664000175000017500000000027515043301563017315 0ustar carstencarstenversion: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: daily open-pull-requests-limit: 10 ignore: - dependency-name: django versions: - 3.1.6 django-rq-3.1/.github/workflows/0000775000175000017500000000000015043301563016516 5ustar carstencarstendjango-rq-3.1/.github/workflows/test.yml0000664000175000017500000000376015043301563020226 0ustar carstencarstenname: Test on: push: branches: [master] pull_request: branches: [master] permissions: contents: read # to fetch code (actions/checkout)
jobs: build: runs-on: ubuntu-latest name: Python${{ matrix.python-version }}/Django${{ matrix.django-version }} strategy: matrix: python-version: ["3.10", "3.11", "3.12", "3.13"] django-version: ["4.2", "5.0", "5.1", "5.2"] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4.2.0 with: python-version: ${{ matrix.python-version }} - name: Start Redis uses: supercharge/redis-github-action@1.4.0 - name: Install dependencies run: | python -m pip install --upgrade pip pip install django==${{ matrix.django-version }} \ redis django-redis pyyaml rq sentry-sdk - name: Run Test run: | `which django-admin` test django_rq --settings=django_rq.tests.settings --pythonpath=. -v2 - name: Install optional dependencies run: | pip install prometheus_client - name: Run Test with optional dependencies run: | `which django-admin` test django_rq --settings=django_rq.tests.settings --pythonpath=. -v2 mypy: runs-on: ubuntu-latest name: Type check steps: - uses: actions/checkout@v3 - name: Set up Python 3.8 uses: actions/setup-python@v4.2.0 with: python-version: "3.8" - name: Install dependencies run: | python -m pip install --upgrade pip pip install django-stubs[compatible-mypy] rq types-redis - name: Run mypy continue-on-error: true id: mypy run: | mypy django_rq - name: Set Status if: steps.mypy.outcome == 'failure' run: | echo "Mypy found errors, marking check as neutral" exit 78 # Exit code 78 results in a neutral check django-rq-3.1/.gitignore0000664000175000017500000000020115043301563015102 0ustar carstencarsten.DS_Store *.pyc *.sw* .env .python-version dist/ django_rq.egg-info/ .pytest_cache/ *.rdb # pycharm .idea .vscode Pipfile.lock django-rq-3.1/README.rst0000664000175000017500000004562715043301563014626 0ustar carstencarsten========= Django-RQ ========= |Build Status| Django integration with `RQ `__, a `Redis `__ based Python queuing library. `Django-RQ `__ is a simple app that allows you to configure your queues in django's ``settings.py`` and easily use them in your project. ================= Support Django-RQ ================= If you find ``django-rq`` useful, please consider supporting its development via `Tidelift `_. ============ Requirements ============ * `Django `__ (3.2+) * `RQ `__ ============ Installation ============ * Install ``django-rq`` (or `download from PyPI `__): .. code-block:: python pip install django-rq * Add ``django_rq`` to ``INSTALLED_APPS`` in ``settings.py``: .. code-block:: python INSTALLED_APPS = ( # other apps "django_rq", ) * Configure your queues in django's ``settings.py``: .. 
code-block:: python RQ_QUEUES = { 'default': { 'HOST': 'localhost', 'PORT': 6379, 'DB': 0, 'USERNAME': 'some-user', 'PASSWORD': 'some-password', 'DEFAULT_TIMEOUT': 360, 'DEFAULT_RESULT_TTL': 800, 'REDIS_CLIENT_KWARGS': { # Eventual additional Redis connection arguments 'ssl_cert_reqs': None, }, }, 'with-sentinel': { 'SENTINELS': [('localhost', 26736), ('localhost', 26737)], 'MASTER_NAME': 'redismaster', 'DB': 0, # Redis username/password 'USERNAME': 'redis-user', 'PASSWORD': 'secret', 'SOCKET_TIMEOUT': 0.3, 'CONNECTION_KWARGS': { # Eventual additional Redis connection arguments 'ssl': True }, 'SENTINEL_KWARGS': { # Eventual Sentinel connection arguments # If Sentinel also has auth, username/password can be passed here 'username': 'sentinel-user', 'password': 'secret', }, }, 'high': { 'URL': os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0'), # If you're on Heroku 'DEFAULT_TIMEOUT': 500, }, 'low': { 'HOST': 'localhost', 'PORT': 6379, 'DB': 0, } } RQ_EXCEPTION_HANDLERS = ['path.to.my.handler'] # If you need custom exception handlers * Include ``django_rq.urls`` in your ``urls.py``: .. code-block:: python urlpatterns += [ path('django-rq/', include('django_rq.urls')) ] ===== Usage ===== Putting jobs in the queue ------------------------- `Django-RQ` allows you to easily put jobs into any of the queues defined in ``settings.py``. It comes with a few utility functions: * ``enqueue`` - push a job to the ``default`` queue: .. code-block:: python import django_rq django_rq.enqueue(func, foo, bar=baz) * ``get_queue`` - returns a ``Queue`` instance. .. code-block:: python import django_rq queue = django_rq.get_queue('high') queue.enqueue(func, foo, bar=baz) In addition to the ``name`` argument, ``get_queue`` also accepts ``default_timeout``, ``is_async``, ``autocommit``, ``connection`` and ``queue_class`` arguments. For example: .. code-block:: python queue = django_rq.get_queue('default', autocommit=True, is_async=True, default_timeout=360) queue.enqueue(func, foo, bar=baz) You can provide your own singleton Redis connection object to this function so that it will not create a new connection object for each queue definition. This will help you limit the number of connections to your Redis server. For example: .. code-block:: python import django_rq import redis redis_cursor = redis.StrictRedis(host='', port='', db='', password='') high_queue = django_rq.get_queue('high', connection=redis_cursor) low_queue = django_rq.get_queue('low', connection=redis_cursor) * ``get_connection`` - accepts a single queue name argument (defaults to "default") and returns a connection to the queue's Redis server: .. code-block:: python import django_rq redis_conn = django_rq.get_connection('high') * ``get_worker`` - accepts optional queue names and returns a new `RQ` ``Worker`` instance for specified queues (or ``default`` queue): .. code-block:: python import django_rq worker = django_rq.get_worker() # Returns a worker for "default" queue worker.work() worker = django_rq.get_worker('low', 'high') # Returns a worker for "low" and "high"
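Putting these helpers together, a minimal end-to-end sketch (``count_words`` is an illustrative function, not part of django-rq; for a real worker to pick it up, it must live in an importable module): .. code-block:: python

    import django_rq

    def count_words(text):
        # Trivial task body used for illustration.
        return len(text.split())

    queue = django_rq.get_queue('default')
    job = queue.enqueue(count_words, 'The quick brown fox')
    print(job.id, job.get_status())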
@job decorator -------------- To easily turn a callable into an RQ task, you can also use the ``@job`` decorator that comes with ``django_rq``: .. code-block:: python from django_rq import job @job def long_running_func(): pass long_running_func.delay() # Enqueue function in "default" queue @job('high') def long_running_func(): pass long_running_func.delay() # Enqueue function in "high" queue You can pass in any arguments that RQ's job decorator accepts: .. code-block:: python @job('default', timeout=3600) def long_running_func(): pass long_running_func.delay() # Enqueue function with a timeout of 3600 seconds. It's possible to specify a default for the ``result_ttl`` decorator keyword argument via the ``DEFAULT_RESULT_TTL`` setting: .. code-block:: python RQ = { 'DEFAULT_RESULT_TTL': 5000, } With this setting, the job decorator will set ``result_ttl`` to 5000 unless it's specified explicitly or included in the queue config. Running workers --------------- django_rq provides a management command that starts a worker for every queue specified as arguments:: python manage.py rqworker high default low If you want to run ``rqworker`` in burst mode, you can pass in the ``--burst`` flag:: python manage.py rqworker high default low --burst If you need to use custom worker, job or queue classes, it is best to use global settings (see `Custom queue classes`_ and `Custom job and worker classes`_). However, it is also possible to override such settings with command line options as follows. To use a custom worker class, you can pass in the ``--worker-class`` flag with the path to your worker:: python manage.py rqworker high default low --worker-class 'path.to.GeventWorker' To use a custom queue class, you can pass in the ``--queue-class`` flag with the path to your queue class:: python manage.py rqworker high default low --queue-class 'path.to.CustomQueue' To use a custom job class, provide the ``--job-class`` flag. Starting from version 2.10, running RQ's worker-pool is also supported:: python manage.py rqworker-pool default low medium --num-workers 4 Support for Scheduled Jobs -------------------------- With RQ 1.2.0 you can use the `built-in scheduler <https://python-rq.org/docs/scheduling/>`__ for your jobs. For example: .. code-block:: python from django_rq.queues import get_queue queue = get_queue('default') job = queue.enqueue_at(datetime(2020, 10, 10), func) If you are using the built-in scheduler, you have to start workers with scheduler support:: python manage.py rqworker --with-scheduler Alternatively you can use `RQ Scheduler <https://github.com/ui/rq-scheduler>`__. After installing it, you can also use the ``get_scheduler`` function to return a ``Scheduler`` instance for queues defined in settings.py's ``RQ_QUEUES``. For example: .. code-block:: python import django_rq scheduler = django_rq.get_scheduler('default') job = scheduler.enqueue_at(datetime(2020, 10, 10), func) You can also use the management command ``rqscheduler`` to start the scheduler:: python manage.py rqscheduler
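RQ Scheduler also supports recurring jobs through its ``cron`` method; a minimal sketch (the cron string and the ``nightly_cleanup`` import path are illustrative assumptions, not part of django-rq): .. code-block:: python

    import django_rq
    from my_app.tasks import nightly_cleanup  # any importable callable

    scheduler = django_rq.get_scheduler('default')
    # Run nightly_cleanup every day at midnight on the "default" queue.
    scheduler.cron('0 0 * * *', func=nightly_cleanup, queue_name='default')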
Support for django-redis and django-redis-cache ----------------------------------------------- If you have `django-redis <https://django-redis.readthedocs.org/en/latest/>`__ or `django-redis-cache <https://github.com/sebleier/django-redis-cache/>`__ installed, you can instruct django_rq to use the same connection information from your Redis cache. This has two advantages: it's DRY and it takes advantage of any optimization that may be going on in your cache setup (like using connection pooling or `Hiredis <https://github.com/redis/hiredis>`__.) To configure it, use a dict with the key ``USE_REDIS_CACHE`` pointing to the name of the desired cache in your ``RQ_QUEUES`` dict. It goes without saying that the chosen cache must exist and use the Redis backend. See your respective Redis cache package docs for configuration instructions. It's also important to point out that since the django-redis-cache ``ShardedClient`` splits the cache over multiple Redis connections, it does not work. Here is an example settings fragment for `django-redis`: .. code-block:: python CACHES = { 'redis-cache': { 'BACKEND': 'redis_cache.cache.RedisCache', 'LOCATION': 'localhost:6379:1', 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', 'MAX_ENTRIES': 5000, }, }, } RQ_QUEUES = { 'high': { 'USE_REDIS_CACHE': 'redis-cache', }, 'low': { 'USE_REDIS_CACHE': 'redis-cache', }, } Suspending and Resuming Workers ------------------------------- Sometimes you may want to suspend RQ to prevent it from processing new jobs. A classic example is during the initial phase of a deployment script or in advance of putting your site into maintenance mode. This is particularly helpful when you have jobs that are relatively long-running and might otherwise be forcibly killed during the deploy. The `suspend` command stops workers on *all* queues (in a single Redis database) from picking up new jobs. However, currently running jobs will continue until completion. .. code-block:: bash # Suspend indefinitely python manage.py rqsuspend # Suspend for a specific duration (in seconds) then automatically # resume work again. python manage.py rqsuspend -d 600 # Resume work again. python manage.py rqresume
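The same suspension mechanism can also be driven from Python, for example inside a deployment script; a minimal sketch, assuming RQ's ``rq.suspension`` helpers: .. code-block:: python

    import django_rq
    from rq.suspension import suspend, resume

    connection = django_rq.get_connection('default')
    suspend(connection)  # workers stop picking up new jobs
    # ... perform the deployment ...
    resume(connection)   # workers pick up jobs again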
Queue Statistics ---------------- ``django_rq`` also provides a dashboard to monitor the status of your queues at ``/django-rq/`` (or whatever URL you set in your ``urls.py`` during installation). You can also add a link to this dashboard in ``/admin`` by adding ``RQ_SHOW_ADMIN_LINK = True`` in ``settings.py``. Be careful though, this will override the default admin template so it may interfere with other apps that modify the default admin template. These statistics are also available in JSON format via ``/django-rq/stats.json``, which is accessible to staff members. If you need to access this view via other HTTP clients (for monitoring purposes), you can define ``RQ_API_TOKEN``. Then, include the token in the Authorization header as a Bearer token: ``Authorization: Bearer `` and access it via ``/django-rq/stats.json``. .. image:: demo-django-rq-json-dashboard.png Note: Statistics of scheduled jobs display jobs from the `RQ built-in scheduler <https://python-rq.org/docs/scheduling/>`__, not the optional `RQ scheduler <https://github.com/ui/rq-scheduler>`__. Additionally, these statistics are also accessible from the command line. .. code-block:: bash python manage.py rqstats python manage.py rqstats --interval=1 # Refreshes every second python manage.py rqstats --json # Output as JSON python manage.py rqstats --yaml # Output as YAML .. image:: demo-django-rq-cli-dashboard.gif Configuring Prometheus ---------------------- ``django_rq`` also provides a Prometheus compatible view, which can be enabled by installing ``prometheus_client`` or installing the extra "prometheus-metrics" (``pip install django-rq[prometheus]``). The metrics are exposed at ``/django-rq/metrics/`` and the following is an example of the metrics that are exported:: # HELP rq_workers RQ workers # TYPE rq_workers gauge # HELP rq_job_successful_total RQ successful job count # TYPE rq_job_successful_total counter # HELP rq_job_failed_total RQ failed job count # TYPE rq_job_failed_total counter # HELP rq_working_seconds_total RQ total working time # TYPE rq_working_seconds_total counter # HELP rq_jobs RQ jobs by status # TYPE rq_jobs gauge rq_jobs{queue="default",status="queued"} 0.0 rq_jobs{queue="default",status="started"} 0.0 rq_jobs{queue="default",status="finished"} 0.0 rq_jobs{queue="default",status="failed"} 0.0 rq_jobs{queue="default",status="deferred"} 0.0 rq_jobs{queue="default",status="scheduled"} 0.0 If you need to access this view via other HTTP clients (for monitoring purposes), you can define ``RQ_API_TOKEN``. Then, include the token in the Authorization header as a Bearer token: ``Authorization: Bearer `` and access it via ``/django-rq/metrics``. Configuring Sentry ------------------- Sentry should be configured within the Django ``settings.py`` as described in the `Sentry docs <https://docs.sentry.io/platforms/python/guides/django/>`__. You can override the default Django Sentry configuration when running the ``rqworker`` command by passing the ``sentry-dsn`` option: ``./manage.py rqworker --sentry-dsn=https://*****@sentry.io/222222`` This will override any existing Django configuration and reinitialise Sentry, setting the following Sentry options: .. code-block:: python { 'debug': options.get('sentry_debug'), 'ca_certs': options.get('sentry_ca_certs'), 'integrations': [RedisIntegration(), RqIntegration(), DjangoIntegration()] } Configuring Logging ------------------- RQ uses Python's ``logging``, which means you can easily configure ``rqworker``'s logging mechanism in django's ``settings.py``. For example: .. code-block:: python LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "rq_console": { "format": "%(asctime)s %(message)s", "datefmt": "%H:%M:%S", }, }, "handlers": { "rq_console": { "level": "DEBUG", "class": "rq.logutils.ColorizingStreamHandler", "formatter": "rq_console", "exclude": ["%(asctime)s"], }, }, "loggers": { "rq.worker": { "handlers": ["rq_console"], "level": "DEBUG" }, } } Custom Queue Classes -------------------- By default, every queue will use the ``DjangoRQ`` class. If you want to use a custom queue class, you can do so by adding a ``QUEUE_CLASS`` option on a per queue basis in ``RQ_QUEUES``: .. code-block:: python RQ_QUEUES = { 'default': { 'HOST': 'localhost', 'PORT': 6379, 'DB': 0, 'QUEUE_CLASS': 'module.path.CustomClass', } } or you can specify ``DjangoRQ`` to use a custom class for all your queues in ``RQ`` settings: .. code-block:: python RQ = { 'QUEUE_CLASS': 'module.path.CustomClass', } Custom queue classes should inherit from ``django_rq.queues.DjangoRQ``. If you are using more than one queue class (not recommended), be sure to only run workers on queues with the same queue class. For example, if you have two queues defined in ``RQ_QUEUES`` and one has a custom class specified, you would have to run at least two separate workers, one for each queue. Custom Job and Worker Classes ----------------------------- Similarly to custom queue classes, global custom job and worker classes can be configured using the ``JOB_CLASS`` and ``WORKER_CLASS`` settings: .. code-block:: python RQ = { 'JOB_CLASS': 'module.path.CustomJobClass', 'WORKER_CLASS': 'module.path.CustomWorkerClass', } The custom job class should inherit from ``rq.job.Job``; it will be used for all jobs if configured. The custom worker class should inherit from ``rq.worker.Worker``; it will be used for running all workers unless overridden by the ``rqworker`` management command's ``worker-class`` option.
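A minimal sketch of what such classes might look like (the module path and hook behavior are illustrative assumptions, not prescribed by django-rq): .. code-block:: python

    # myapp/rq_classes.py (hypothetical module path)
    from rq.job import Job
    from rq.worker import Worker

    class CustomJobClass(Job):
        # Example hook: attach extra metadata before the job body runs.
        def perform(self):
            self.meta.setdefault('app', 'myapp')
            return super().perform()

    class CustomWorkerClass(Worker):
        # Example hook: log each job before executing it.
        def execute_job(self, job, queue):
            self.log.info('About to run job %s', job.id)
            return super().execute_job(job, queue)

These would then be referenced as ``'myapp.rq_classes.CustomJobClass'`` and ``'myapp.rq_classes.CustomWorkerClass'`` in the ``RQ`` setting above.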
Testing Tip ----------- For an easier testing process, you can run a worker synchronously this way: .. code-block:: python from django.test import TestCase from django_rq import get_worker class MyTest(TestCase): def test_something_that_creates_jobs(self): ... # Stuff that creates jobs. get_worker().work(burst=True) # Processes all jobs, then stops. ... # Asserts that the job stuff is done. Synchronous Mode ---------------- You can set the option ``ASYNC`` to ``False`` to make synchronous operation the default for a given queue. This will cause jobs to execute immediately and on the same thread as they are dispatched, which is useful for testing and debugging. For example, you might add the following after your queue configuration in your settings file: .. code-block:: python # ... Logic to set DEBUG and TESTING settings to True or False ... # ... Regular RQ_QUEUES setup code ... if DEBUG or TESTING: for queueConfig in RQ_QUEUES.values(): queueConfig['ASYNC'] = False Note that setting the ``is_async`` parameter explicitly when calling ``get_queue`` will override this setting. ============= Running Tests ============= To run ``django_rq``'s test suite:: `which django-admin` test django_rq --settings=django_rq.tests.settings --pythonpath=. =================== Deploying on Ubuntu =================== Create an rqworker service that runs the high, default, and low queues: sudo vi /etc/systemd/system/rqworker.service .. code-block:: bash [Unit] Description=Django-RQ Worker After=network.target [Service] WorkingDirectory=<> ExecStart=/home/ubuntu/.virtualenv/<>/bin/python \ <>/manage.py \ rqworker high default low [Install] WantedBy=multi-user.target Enable and start the service: .. code-block:: bash sudo systemctl enable rqworker sudo systemctl start rqworker =================== Deploying on Heroku =================== Add `django-rq` to your `requirements.txt` file with: .. code-block:: bash pip freeze > requirements.txt Update your `Procfile` to: .. code-block:: bash web: gunicorn --pythonpath="$PWD/your_app_name" config.wsgi:application worker: python your_app_name/manage.py rqworker high default low Commit and re-deploy. Then add your new worker with: .. code-block:: bash heroku scale worker=1 ========= Changelog ========= See `CHANGELOG.md <https://github.com/rq/django-rq/blob/master/CHANGELOG.md>`__. .. |Build Status| image:: https://github.com/rq/django-rq/actions/workflows/test.yml/badge.svg :target: https://github.com/rq/django-rq/actions/workflows/test.yml Django-RQ is maintained by `Stamps <https://stamps.co.id>`_, an Indonesian-based company that provides enterprise-grade CRM and order management systems.