test_nn_kldivloss/

test_nn_kldivloss/mint_nn_kldivloss_binary_case3_input1.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case2_output1.npy
    [binary NumPy float32 payload, omitted]

    [additional .npy payloads whose archive headers were garbled in extraction, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case6.py

import torch
from common.utils import auto_generate_data


@auto_generate_data()
def mint_kldivloss_benchmark():
    """
    benchmark:
        torch.nn.KLDivLoss
    inputs:
        input: ((2, 3, 4), float32)
        target: ((3, 4), float32)
        reduction: 'none'
        log_target: True
    outputs:
        output: ((2, 3, 4), float32)
        input_grad: ((2, 3, 4), float32)
    """
    input = torch.rand((2, 3, 4), dtype=torch.float32)
    input.requires_grad = True
    target = torch.rand((3, 4), dtype=torch.float32)
    reduction = 'none'
    log_target = True
    net = torch.nn.KLDivLoss(reduction=reduction, log_target=log_target)
    output = net(input, target)
    grads = torch.ones_like(output, dtype=torch.float32)
    output.backward(gradient=grads)
    input_grad = input.grad
    return [input.detach().numpy(), target.detach().numpy()], \
        [output.detach().numpy(), input_grad.detach().numpy()]


if __name__ == "__main__":
    mint_kldivloss_benchmark()


test_nn_kldivloss/mint_nn_kldivloss_binary_case3_input0.npy
    [binary NumPy float32 payload, omitted]
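For reference alongside mint_nn_kldivloss_binary_case6.py above: with log_target=True, torch.nn.KLDivLoss treats both arguments as log-space values and the elementwise loss is exp(target) * (target - input). The sketch below is a hypothetical NumPy cross-check, not part of the original archive; it only illustrates that formula and the matching input gradient for reduction='none', with the (3, 4) target broadcast against the (2, 3, 4) input as the case6 docstring documents.

# Hypothetical NumPy cross-check for case6 (not part of the original archive).
import numpy as np

def kldiv_none_log_target(input_arr, target_arr):
    # Elementwise loss with a log-space target: exp(target) * (target - input),
    # broadcast to the input's shape by ordinary NumPy broadcasting.
    output = np.exp(target_arr) * (target_arr - input_arr)
    # Each input element appears exactly once in the elementwise output, so with
    # an all-ones upstream gradient (as in the benchmark scripts) the input
    # gradient is -exp(target), broadcast to the input's shape.
    input_grad = np.broadcast_to(-np.exp(target_arr), input_arr.shape)
    return output, np.array(input_grad)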
test_nn_kldivloss/mint_nn_kldivloss_binary_case1.py

import torch
from common.utils import auto_generate_data


@auto_generate_data()
def mint_kldivloss_benchmark():
    """
    benchmark:
        torch.nn.KLDivLoss
    inputs:
        input: ((2, 3), float32)
        target: ((2, 3), float32)
        reduction: 'mean'
        log_target: True
    outputs:
        output: ((), float32)
        input_grad: ((2, 3), float32)
    """
    input = torch.rand((2, 3), dtype=torch.float32)
    input.requires_grad = True
    target = torch.rand((2, 3), dtype=torch.float32)
    reduction = 'mean'
    log_target = True
    net = torch.nn.KLDivLoss(reduction=reduction, log_target=log_target)
    output = net(input, target)
    grads = torch.ones_like(output, dtype=torch.float32)
    output.backward(gradient=grads)
    input_grad = input.grad
    return [input.detach().numpy(), target.detach().numpy()], \
        [output.detach().numpy(), input_grad.detach().numpy()]


if __name__ == "__main__":
    mint_kldivloss_benchmark()


test_nn_kldivloss/mint_nn_kldivloss_binary_case4_output1.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case1_output1.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case4_input1.npy
    [binary NumPy float32 payload, omitted]
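The *_input*.npy and *_output*.npy files above and below are the serialized arrays the benchmark functions return. The loader below is a hypothetical replay/consistency check, not part of the archive; it assumes the harness stores the returned lists in order (input0 = input, input1 = target, output0 = output, output1 = input_grad), which is inferred from the return statements rather than documented.

# Hypothetical replay check for one saved case (a sketch under the
# assumptions stated above, not part of the original archive).
import numpy as np
import torch

def replay_case(prefix, reduction, log_target):
    inp = torch.from_numpy(np.load(prefix + "_input0.npy")).requires_grad_(True)
    tgt = torch.from_numpy(np.load(prefix + "_input1.npy"))
    saved_out = np.load(prefix + "_output0.npy")
    saved_grad = np.load(prefix + "_output1.npy")

    net = torch.nn.KLDivLoss(reduction=reduction, log_target=log_target)
    out = net(inp, tgt)
    out.backward(gradient=torch.ones_like(out))

    np.testing.assert_allclose(out.detach().numpy(), saved_out, rtol=1e-5, atol=1e-6)
    np.testing.assert_allclose(inp.grad.numpy(), saved_grad, rtol=1e-5, atol=1e-6)


if __name__ == "__main__":
    # Parameters match the case1 docstring: reduction='mean', log_target=True.
    replay_case("test_nn_kldivloss/mint_nn_kldivloss_binary_case1", "mean", True)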
test_nn_kldivloss/mint_nn_kldivloss_binary_case1_input0.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case6_input1.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case4_output0.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case2_input0.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case5_input1.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case2_input1.npy
    [binary NumPy float32 payload, omitted]
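Only the binary payloads of these cases appear in this part of the archive. A quick, hypothetical way (not part of the archive) to recover their shapes and dtypes and match them against the docstrings of the generator scripts:

# Hypothetical inspection helper (not part of the original archive).
import glob
import numpy as np

for path in sorted(glob.glob("test_nn_kldivloss/*.npy")):
    arr = np.load(path)
    print(path, arr.shape, arr.dtype)  # the generators save float32 arrays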
test_nn_kldivloss/mint_nn_kldivloss_binary_case5.py

import torch
from common.utils import auto_generate_data


@auto_generate_data()
def mint_kldivloss_benchmark():
    """
    benchmark:
        torch.nn.KLDivLoss
    inputs:
        input: ((2, 3, 4), float32)
        target: ((2, 1, 4), float32)
        reduction: 'mean'
        log_target: False
    outputs:
        output: ((), float32)
        input_grad: ((2, 3, 4), float32)
    """
    input = torch.rand((2, 3, 4), dtype=torch.float32)
    input.requires_grad = True
    target = torch.rand((2, 1, 4), dtype=torch.float32)
    reduction = 'mean'
    log_target = False
    net = torch.nn.KLDivLoss(reduction=reduction, log_target=log_target)
    output = net(input, target)
    grads = torch.ones_like(output, dtype=torch.float32)
    output.backward(gradient=grads)
    input_grad = input.grad
    return [input.detach().numpy(), target.detach().numpy()], \
        [output.detach().numpy(), input_grad.detach().numpy()]


if __name__ == "__main__":
    mint_kldivloss_benchmark()


test_nn_kldivloss/mint_nn_kldivloss_binary_case5_input0.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case5_output1.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case1_output0.npy
    [binary NumPy float32 payload, omitted]

test_nn_kldivloss/mint_nn_kldivloss_binary_case4.py

import torch
from common.utils import auto_generate_data


@auto_generate_data()
def mint_kldivloss_benchmark():
    """
    benchmark:
        torch.nn.KLDivLoss
    inputs:
        input: ((2, 3, 4, 5, 6), float32)
        target: ((2, 3, 4, 5, 6), float32)
        reduction: 'none'
        log_target: True
    outputs:
        output: ((2, 3, 4, 5, 6), float32)
        input_grad: ((2, 3, 4, 5, 6), float32)
    """
    input = torch.rand((2, 3, 4, 5, 6), dtype=torch.float32)
    input.requires_grad = True
    target = torch.rand((2, 3, 4, 5, 6), dtype=torch.float32)
    reduction = 'none'
    log_target = True
    net = torch.nn.KLDivLoss(reduction=reduction, log_target=log_target)
    output = net(input, target)
    grads = torch.ones_like(output, dtype=torch.float32)
    output.backward(gradient=grads)
    input_grad = input.grad
    return [input.detach().numpy(), target.detach().numpy()], \
        [output.detach().numpy(), input_grad.detach().numpy()]


if __name__ == "__main__":
    mint_kldivloss_benchmark()


test_nn_kldivloss/mint_nn_kldivloss_binary_case6_input0.npy
    [binary NumPy float32 payload, omitted]
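For the probability-space case exercised by mint_nn_kldivloss_binary_case5.py (log_target=False, reduction='mean'), the elementwise loss is target * (log(target) - input) and the mean reduction divides by the total number of elementwise terms. The sketch below is a hypothetical NumPy reference, not part of the archive; it assumes strictly positive targets (torch.rand can in principle return exact zeros). Note that PyTorch's documentation recommends reduction='batchmean' when the true KL divergence value is needed, since 'mean' averages over every element rather than over the batch.

# Hypothetical NumPy reference for case5 (not part of the original archive).
import numpy as np

def kldiv_mean_reference(input_arr, target_arr):
    # Elementwise loss with a probability-space target (log_target=False);
    # the (2, 1, 4) target broadcasts against the (2, 3, 4) input.
    pointwise = target_arr * (np.log(target_arr) - input_arr)
    output = pointwise.mean()
    # 'mean' divides by the number of elementwise terms, so the gradient of the
    # scalar output with respect to each input element is -target / N.
    input_grad = -np.broadcast_to(target_arr, input_arr.shape) / pointwise.size
    return output, np.array(input_grad)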