From 6e42f98a47cd0d8cf7866034717c11e0fbfeba7a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 3 Jun 2024 13:58:38 +0000 Subject: [PATCH] Deployed a4bc8b7 to dev with MkDocs 1.5.3 and mike 2.0.0 --- dev/404.html | 144 +- dev/assets/images/social/reference/cli.png | Bin 0 -> 24774 bytes .../jinja.png} | Bin .../social/{ => reference}/recipe_file.png | Bin dev/authentication_and_upload/index.html | 150 +- dev/automatic_linting/index.html | 166 +- dev/build_options/index.html | 166 +- dev/build_script/index.html | 150 +- dev/cli_usage/index.html | 150 +- dev/compilers/index.html | 150 +- dev/experimental_features/index.html | 158 +- dev/highlevel/index.html | 207 +- dev/index.html | 144 +- dev/internals/index.html | 172 +- dev/package_spec/index.html | 150 +- dev/rebuild/index.html | 150 +- dev/reference/cli/index.html | 1872 +++++++++++++++++ .../jinja}/index.html | 458 ++-- dev/{ => reference}/recipe_file/index.html | 996 +++++---- dev/search/search_index.json | 2 +- dev/selectors/index.html | 150 +- dev/sitemap.xml | 65 +- dev/sitemap.xml.gz | Bin 416 -> 418 bytes dev/special_files/index.html | 150 +- dev/testing/index.html | 150 +- dev/tui/index.html | 150 +- dev/tutorials/cpp/index.html | 721 ++++--- dev/tutorials/python/index.html | 424 ++-- dev/tutorials/rust/index.html | 235 ++- dev/variants/index.html | 150 +- 30 files changed, 5651 insertions(+), 1929 deletions(-) create mode 100644 dev/assets/images/social/reference/cli.png rename dev/assets/images/social/{available_jinja.png => reference/jinja.png} (100%) rename dev/assets/images/social/{ => reference}/recipe_file.png (100%) create mode 100644 dev/reference/cli/index.html rename dev/{available_jinja => reference/jinja}/index.html (90%) rename dev/{ => reference}/recipe_file/index.html (96%) diff --git a/dev/404.html b/dev/404.html index d4533f6ae..f6a5d0e77 100644 --- a/dev/404.html +++ b/dev/404.html @@ -406,11 +406,11 @@
  • - + - Recipe file + Advanced options @@ -426,11 +426,11 @@
  • - + - Advanced options + Experimental features @@ -446,11 +446,11 @@
  • - + - Jinja functions + Selector syntax @@ -466,11 +466,11 @@
  • - + - Experimental features + Build script @@ -486,11 +486,11 @@
  • - + - Selector syntax + Variants @@ -506,11 +506,11 @@
  • - + - Build script + Compilers and cross compilation @@ -526,11 +526,11 @@
  • - + - Variants + CLI Usage @@ -546,11 +546,11 @@
  • - + - Compilers and cross compilation + Terminal User Interface (TUI) @@ -566,11 +566,11 @@
  • - + - CLI Usage + Authentication & upload @@ -586,11 +586,11 @@
  • - + - Terminal User Interface (TUI) + Automatic recipe linting @@ -606,11 +606,11 @@
  • - + - Authentication & upload + Testing packages @@ -626,11 +626,11 @@
  • - + - Automatic recipe linting + Reproducible builds @@ -646,11 +646,11 @@
  • - + - Testing packages + Package specification @@ -666,11 +666,11 @@
  • - + - Reproducible builds + Activation scripts @@ -686,11 +686,11 @@
  • - + - Package specification + Internals @@ -705,12 +705,52 @@ + + + + + + + + + +
  • + + + + + + + + + + +
• + + diff --git a/dev/assets/images/social/reference/cli.png b/dev/assets/images/social/reference/cli.png new file mode 100644 index 0000000000000000000000000000000000000000..fd89aa0e412eef2edba91e92adefbfc8a45215bc GIT binary patch literal 24774 (binary image data) - - - - - Recipe file - - - - - - - - - - - - - -
  • @@ -482,26 +462,6 @@ -
  • - - - - - Jinja functions - - - - -
  • - - - - - - - - -
  • @@ -876,6 +836,116 @@ + + + + + + + + + + + + + + + +
  • + + + + + + + + + + +
  • + + + diff --git a/dev/automatic_linting/index.html b/dev/automatic_linting/index.html index 76cd72ed9..c4e0d6c28 100644 --- a/dev/automatic_linting/index.html +++ b/dev/automatic_linting/index.html @@ -102,7 +102,7 @@ @@ -827,17 +897,13 @@ -

    Automatic linting in VSCode#

    -

The new recipe format comes with a strict JSON schema. You can find the schema in this repository.

    -

It is implemented with pydantic and renders to a JSON schema file. The YAML language server extension in VSCode can recognize the schema and give helpful hints during editing.

    -

With the YAML language server installed, automatic linting can be enabled by adding the following line to the top of the recipe file:

    +

    Enabling Automatic Linting in VSCode#

    +

    Our new recipe format adheres to a strict JSON schema, which you can access here.

    +

    This schema is implemented using pydantic and can be rendered into a JSON schema file. The YAML language server extension in VSCode is capable of recognizing this schema, providing useful hints during the editing process.

    +

    To enable automatic linting with the YAML language server, you need to add the following line at the beginning of your recipe file:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json
     
    +

    Alternatively, if you prefer not to add this line to your file, you can install the JSON Schema Store Catalog extension. This extension will also enable automatic linting for your recipe files.
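If you would rather configure this once per workspace instead of per file, a minimal sketch (assuming the YAML extension's yaml.schemas setting; the file glob is a placeholder to adjust to your layout) is to add the mapping to .vscode/settings.json:
{
  "yaml.schemas": {
    "https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json": "**/recipe.yaml"
  }
}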

    diff --git a/dev/build_options/index.html b/dev/build_options/index.html index f0a442100..c376f6daf 100644 --- a/dev/build_options/index.html +++ b/dev/build_options/index.html @@ -11,10 +11,10 @@ - + - + @@ -440,26 +440,6 @@ - - -
  • - - - - - Recipe file - - - - -
  • - - - - - - - @@ -567,26 +547,6 @@ -
  • - - - - - Jinja functions - - - - -
  • - - - - - - - - -
  • @@ -861,6 +821,116 @@ + + + + + + + + + + + + + + + +
  • + + + + + + + + + + +
  • + + + @@ -1190,9 +1260,9 @@

    Dynamic linking configuration @@ -1213,16 +1283,16 @@

    Dynamic linking configuration diff --git a/dev/cli_usage/index.html b/dev/cli_usage/index.html index 1442e75f6..fbc27654b 100644 --- a/dev/cli_usage/index.html +++ b/dev/cli_usage/index.html @@ -442,26 +442,6 @@ -
  • - - - - - Recipe file - - - - -
  • - - - - - - - - -
  • @@ -482,26 +462,6 @@ -
  • - - - - - Jinja functions - - - - -
  • - - - - - - - - -
  • @@ -849,6 +809,116 @@ + + + + + + + + + + + + + + + +
  • + + + + + + + + + + +
  • + + + diff --git a/dev/compilers/index.html b/dev/compilers/index.html index 42e513550..631cebf66 100644 --- a/dev/compilers/index.html +++ b/dev/compilers/index.html @@ -442,26 +442,6 @@ -
  • - - - - - Recipe file - - - - -
  • - - - - - - - - -
  • @@ -482,26 +462,6 @@ -
  • - - - - - Jinja functions - - - - -
  • - - - - - - - - -
  • @@ -825,6 +785,116 @@ + + + + + + + + + + + + + + + +
  • + + + + + + + + + + +
  • + + + diff --git a/dev/experimental_features/index.html b/dev/experimental_features/index.html index 768d6eae0..32476c8b7 100644 --- a/dev/experimental_features/index.html +++ b/dev/experimental_features/index.html @@ -11,7 +11,7 @@ - + @@ -442,26 +442,6 @@ -
  • - - - - - Recipe file - - - - -
  • - - - - - - - - -
  • @@ -480,26 +460,6 @@ - - -
  • - - - - - Jinja functions - - - - -
  • - - - - - - - @@ -849,6 +809,116 @@ + + + + + + + + + + + + + + + +
  • + + + + + + + + + + +
  • + + + @@ -1081,9 +1151,9 @@

    Usage diff --git a/dev/highlevel/index.html b/dev/highlevel/index.html index 8619ea014..5af2e35bc 100644 --- a/dev/highlevel/index.html +++ b/dev/highlevel/index.html @@ -14,7 +14,7 @@ - + @@ -510,11 +510,11 @@
  • - + - Recipe file + Advanced options @@ -530,11 +530,11 @@
  • - + - Advanced options + Experimental features @@ -550,11 +550,11 @@
  • - + - Jinja functions + Selector syntax @@ -570,11 +570,11 @@
  • - + - Experimental features + Build script @@ -590,11 +590,11 @@
  • - + - Selector syntax + Variants @@ -610,11 +610,11 @@
  • - + - Build script + Compilers and cross compilation @@ -630,11 +630,11 @@
  • - + - Variants + CLI Usage @@ -650,11 +650,11 @@
  • - + - Compilers and cross compilation + Terminal User Interface (TUI) @@ -670,11 +670,11 @@
  • - + - CLI Usage + Authentication & upload @@ -690,11 +690,11 @@
  • - + - Terminal User Interface (TUI) + Automatic recipe linting @@ -710,11 +710,11 @@
  • - + - Authentication & upload + Testing packages @@ -730,11 +730,11 @@
  • - + - Automatic recipe linting + Reproducible builds @@ -750,11 +750,11 @@
  • - + - Testing packages + Package specification @@ -770,11 +770,11 @@
  • - + - Reproducible builds + Activation scripts @@ -790,11 +790,11 @@
  • - + - Package specification + Internals @@ -809,12 +809,52 @@ + + + + + + + + + +
  • + + + + + + + + + + +
  • + + @@ -930,29 +1000,40 @@

How does rattler-build work?# rattler-build reads a recipe.yaml file that specifies how the package is to be built and what the dependencies are. From the recipe file, rattler-build executes several steps:

      -
1. Parse the recipe file and evaluate conditional parts (we will see that later, but parts of the recipe can be conditional, e.g. on Windows vs. macOS)
2. Retrieve all source files specified in the recipe, such as .tar.gz files, git repositories or even local paths. Additionally, this step will apply patches that can be specified alongside the source file.
3. Download and install dependencies into temporary "host" and "build" workspaces. Any dependencies that are needed at build time are installed in this step.
4. Execute the build script to build/compile the source code and "install" it into the host environment.
5. Collect all files that are new in the "host" environment (because the build script just created them) and apply some transformations if necessary; specifically, we edit the rpath on Linux and macOS to help make binaries relocatable.
6. Bundle all the files in a package and write out any additional metadata into the info/index.json, info/about.json, and info/paths.json files. This also creates the test files that are bundled with the package.
7. If any tests are specified in the recipe, then those tests are run. The package is considered "done" if it passes all of the tests, otherwise we move it to a "broken" place.
1. Rendering: Parse the recipe file and evaluate conditionals, Jinja expressions, variables, and variants.
2. Fetch source: Retrieve the specified source files, such as .tar.gz files, git repositories, or local paths. Additionally, this step will apply patches that can be specified alongside the source file.
3. Install build environments: Download and install dependencies into temporary "host" and "build" workspaces. Any dependencies that are needed at build time are installed in this step.
4. Build source: Execute the build script to build/compile the source code and install it into the host environment.
5. Prepare package files: Collect all files that are new in the "host" environment and apply some transformations if necessary; specifically, we edit the rpath on Linux and macOS to make binaries relocatable.
6. Package: Bundle all the files in a package and write out any additional metadata into the info/index.json, info/about.json, and info/paths.json files. This also creates the test files that are bundled with the package.
7. Test: Run any tests specified in the recipe. The package is considered done if it passes all the tests; otherwise it is moved to broken/ in the output directory.

    +

    Run any tests specified in the recipe. + The package is considered done if it passes all the tests, otherwise its moved to broken/ in the output directory.

    +

    After this process, a package is created. This package can be uploaded to somewhere like a custom prefix.dev private or public channel.

    How to run rattler-build#

    Running rattler-build is straightforward. It can be done on the command line:

    rattler-build build --recipe myrecipe/recipe.yaml
    @@ -1134,16 +1215,16 @@ 

    Overview of a recipe.yaml @@ -1118,6 +1190,26 @@

    diff --git a/dev/rebuild/index.html b/dev/rebuild/index.html index d68e9a227..737b3f266 100644 --- a/dev/rebuild/index.html +++ b/dev/rebuild/index.html @@ -442,26 +442,6 @@ -
  • - - - - - Recipe file - - - - -
  • - - - - - - - - -
  • @@ -482,26 +462,6 @@ -
  • - - - - - Jinja functions - - - - -
  • - - - - - - - - -
  • @@ -849,6 +809,116 @@ + + + + + + + + + + + + + + + +
  • + + + + + + + + + + +
  • + + + diff --git a/dev/reference/cli/index.html b/dev/reference/cli/index.html new file mode 100644 index 000000000..61c53986c --- /dev/null +++ b/dev/reference/cli/index.html @@ -0,0 +1,1872 @@ + + + + + + + + + + + + + + + + + + + + + + + + + CLI - rattler-build + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Command-Line Help for rattler-build#

    +

    This document contains the help content for the rattler-build command-line program.

    +

    rattler-build#

    +

    Usage: rattler-build [OPTIONS] [COMMAND]

    +
    Subcommands:#
    +
      +
    • build — Build a package from a recipe
    • +
    • test — Run a test for a single package
    • +
    • rebuild — Rebuild a package from a package file instead of a recipe
    • +
    • upload — Upload a package
    • +
    • completion — Generate shell completion script
    • +
    • generate-recipe — Generate a recipe from PyPI or CRAN
    • +
    • auth — Handle authentication to external channels
    • +
    +
    Options:#
    +
      +
    • +

      -v, --verbose

      +

      Increase logging verbosity

      +
    • +
    • +

      -q, --quiet

      +

      Decrease logging verbosity

      +
    • +
    • +

      --log-style <LOG_STYLE>

      +

      Logging style

      +
        +
      • Default value: fancy
      • +
      • Possible values:
          +
        • fancy: + Use fancy logging output
        • +
        • json: + Use JSON logging output
        • +
        • plain: + Use plain logging output
        • +
        +
      • +
      +
    • +
    • +

      --color <COLOR>

      +

Enable or disable colored output from rattler-build. Also honors the CLICOLOR and CLICOLOR_FORCE environment variables

      +
        +
      • Default value: auto
      • +
      • Possible values:
          +
        • always: + Always use colors
        • +
        • never: + Never use colors
        • +
        • auto: + Use colors when the output is a terminal
        • +
        +
      • +
      +
    • +
    +

    build#

    +

    Build a package from a recipe

    +

    Usage: rattler-build build [OPTIONS]

    +
    Options:#
    +
      +
    • +

      -r, --recipe <RECIPE>

      +

      The recipe file or directory containing recipe.yaml. Defaults to the current directory

      +
        +
      • Default value: .
      • +
      +
    • +
    • +

      --recipe-dir <RECIPE_DIR>

      +

      The directory that contains recipes

      +
    • +
    • +

      --up-to <UP_TO>

      +

      Build recipes up to the specified package

      +
    • +
    • +

      --build-platform <BUILD_PLATFORM>

      +

      The build platform to use for the build (e.g. for building with emulation, or rendering)

      +
        +
      • Default value: linux-64
      • +
      +
    • +
    • +

      --target-platform <TARGET_PLATFORM>

      +

      The target platform for the build

      +
        +
      • Default value: linux-64
      • +
      +
    • +
    • +

      -c, --channel <CHANNEL>

      +

      Add a channel to search for dependencies in

      +
        +
      • Default value: conda-forge
      • +
      +
    • +
    • +

      -m, --variant-config <VARIANT_CONFIG>

      +

      Variant configuration files for the build

      +
    • +
    • +

      --render-only

      +

      Render the recipe files without executing the build

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --with-solve

      +

Render the recipe files while also solving dependencies

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --keep-build

      +

      Keep intermediate build artifacts after the build

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --no-build-id

      +

Don't use the build id (timestamp) when creating the build directory name

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --compression-threads <COMPRESSION_THREADS>

      +

      The number of threads to use for compression (only relevant when also using --package-format conda)

      +
    • +
    • +

      --use-zstd

      +

      Enable support for repodata.json.zst

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --use-bz2

      +

      Enable support for repodata.json.bz2

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --experimental

      +

      Enable experimental features

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --auth-file <AUTH_FILE>

      +

      Path to an auth-file to read authentication information from

      +
    • +
    • +

      --tui

      +

      Launch the terminal user interface

      +
        +
      • Default value: false
      • +
      • Possible values: true, false
      • +
      +
    • +
    +
    Modifying result#
    +
      +
    • +

      --package-format <PACKAGE_FORMAT>

      +

      The package format to use for the build. Can be one of tar-bz2 or conda. +You can also add a compression level to the package format, e.g. tar-bz2:<number> (from 1 to 9) or conda:<number> (from -7 to 22).

      +
        +
      • Default value: conda
      • +
      +
    • +
    • +

      --no-include-recipe

      +

      Don't store the recipe in the final package

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --no-test

      +

      Don't run the tests after building the package

      +
        +
      • Default value: false
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --color-build-log

      +

      Don't force colors in the output of the build script

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --output-dir <OUTPUT_DIR>

      +

      Output directory for build artifacts.

      +
        +
      • Default value: ./output
      • +
      +
    • +
    • +

      --skip-existing <SKIP_EXISTING>

      +

Whether to skip packages that already exist in any channel. If set to none, do not skip any packages (default when not specified). If set to local, only skip packages that already exist locally (default when using --skip-existing). If set to all, skip packages that already exist in any channel

      +
        +
      • Default value: none
      • +
      • Possible values:
          +
        • none: + Do not skip any packages
        • +
        • local: + Skip packages that already exist locally
        • +
        • all: + Skip packages that already exist in any channel
        • +
        +
      • +
      +
    • +
    +

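As an illustrative sketch (recipe path and target platform are placeholders; the flags are documented above), you could first inspect the rendered recipe and then run a cross build:
rattler-build build --recipe ./recipe.yaml --render-only
rattler-build build --recipe ./recipe.yaml --target-platform linux-aarch64 --output-dir ./output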
    test#

    +

    Run a test for a single package

    +

    This creates a temporary directory, copies the package file into it, and then runs the indexing. It then creates a test environment that installs the package and any extra dependencies specified in the package test dependencies file.

    +

    With the activated test environment, the packaged test files are run:

    +
      +
• info/test/run_test.sh or info/test/run_test.bat on Windows
• info/test/run_test.py
    • +
    +

    These test files are written at "package creation time" and are part of the package.

    +

    Usage: rattler-build test [OPTIONS] --package-file <PACKAGE_FILE>

    +
    Options:#
    +
      +
    • +

      -c, --channel <CHANNEL>

      +

      Channels to use when testing

      +
    • +
    • +

      -p, --package-file <PACKAGE_FILE>

      +

      The package file to test

      +
    • +
    • +

      --use-zstd

      +

      Enable support for repodata.json.zst

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --use-bz2

      +

      Enable support for repodata.json.bz2

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --experimental

      +

      Enable experimental features

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --auth-file <AUTH_FILE>

      +

      Path to an auth-file to read authentication information from

      +
    • +
    +
    Modifying result#
    +
      +
    • +

      --output-dir <OUTPUT_DIR>

      +

      Output directory for build artifacts.

      +
        +
      • Default value: ./output
      • +
      +
    • +
    +

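For example (the package file name is a placeholder), testing a previously built package with an extra channel might look like:
rattler-build test --package-file ./output/noarch/mypkg-0.1.0-pyh4616a5c_0.conda -c conda-forge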
    rebuild#

    +

    Rebuild a package from a package file instead of a recipe

    +

    Usage: rattler-build rebuild [OPTIONS] --package-file <PACKAGE_FILE>

    +
    Options:#
    +
      +
    • +

      -p, --package-file <PACKAGE_FILE>

      +

      The package file to rebuild

      +
    • +
    • +

      --no-test

      +

      Do not run tests after building

      +
        +
      • Default value: false
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --use-zstd

      +

      Enable support for repodata.json.zst

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --use-bz2

      +

      Enable support for repodata.json.bz2

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --experimental

      +

      Enable experimental features

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --auth-file <AUTH_FILE>

      +

      Path to an auth-file to read authentication information from

      +
    • +
    +
    Modifying result#
    +
      +
    • +

      --output-dir <OUTPUT_DIR>

      +

      Output directory for build artifacts.

      +
        +
      • Default value: ./output
      • +
      +
    • +
    +

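A sketch of checking that a build is reproducible by rebuilding an existing package (the file name is a placeholder):
rattler-build rebuild --package-file ./output/linux-64/mypkg-0.1.0-h60d57d3_0.conda --no-test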
    upload#

    +

    Upload a package

    +

    Usage: rattler-build upload [OPTIONS] [PACKAGE_FILES]... <COMMAND>

    +
    Subcommands:#
    +
      +
• quetz — Upload to a Quetz server. Authentication is used from the keychain / auth-file
• artifactory — Options for uploading to an Artifactory channel. Authentication is used from the keychain / auth-file
• prefix — Options for uploading to a prefix.dev server. Authentication is used from the keychain / auth-file
• anaconda — Options for uploading to an Anaconda.org server
    • +
    +
    Arguments:#
    +
      +
    • +

      <PACKAGE_FILES>

      +

      The package file to upload

      +
    • +
    +
    Options:#
    +
      +
    • +

      --use-zstd

      +

      Enable support for repodata.json.zst

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --use-bz2

      +

      Enable support for repodata.json.bz2

      +
        +
      • Default value: true
      • +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --experimental

      +

      Enable experimental features

      +
        +
      • Possible values: true, false
      • +
      +
    • +
    • +

      --auth-file <AUTH_FILE>

      +

      Path to an auth-file to read authentication information from

      +
    • +
    +
    Modifying result#
    +
      +
    • +

      --output-dir <OUTPUT_DIR>

      +

      Output directory for build artifacts.

      +
        +
      • Default value: ./output
      • +
      +
    • +
    +

    quetz#

    +

Upload to a Quetz server. Authentication is used from the keychain / auth-file

    +

    Usage: rattler-build upload quetz [OPTIONS] --url <URL> --channel <CHANNEL>

    +
    Options:#
    +
      +
    • +

      -u, --url <URL>

      +

      The URL to your Quetz server

      +
    • +
    • +

      -c, --channel <CHANNEL>

      +

      The URL to your channel

      +
    • +
    • +

      -a, --api-key <API_KEY>

      +

The Quetz API key. If none is provided, the token is read from the keychain / auth-file

      +
    • +
    +

    artifactory#

    +

Options for uploading to an Artifactory channel. Authentication is used from the keychain / auth-file

    +

    Usage: rattler-build upload artifactory [OPTIONS] --url <URL> --channel <CHANNEL>

    +
    Options:#
    +
      +
    • +

      -u, --url <URL>

      +

      The URL to your Artifactory server

      +
    • +
    • +

      -c, --channel <CHANNEL>

      +

      The URL to your channel

      +
    • +
    • +

      -r, --username <USERNAME>

      +

      Your Artifactory username

      +
    • +
    • +

      -p, --password <PASSWORD>

      +

      Your Artifactory password

      +
    • +
    +

    prefix#

    +

    Options for uploading to a prefix.dev server. Authentication is used from the keychain / auth-file

    +

    Usage: rattler-build upload prefix [OPTIONS] --channel <CHANNEL>

    +
    Options:#
    +
      +
    • +

      -u, --url <URL>

      +

      The URL to the prefix.dev server (only necessary for self-hosted instances)

      +
        +
      • Default value: https://prefix.dev
      • +
      +
    • +
    • +

      -c, --channel <CHANNEL>

      +

      The channel to upload the package to

      +
    • +
    • +

      -a, --api-key <API_KEY>

      +

The prefix.dev API key. If none is provided, the token is read from the keychain / auth-file

      +
    • +
    +

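Following the usage line above, an illustrative upload to a prefix.dev channel (channel name and package file are placeholders) could be:
rattler-build upload ./output/noarch/mypkg-0.1.0-pyh4616a5c_0.conda prefix -c my-channel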
    anaconda#

    +

Options for uploading to an Anaconda.org server

    +

    Usage: rattler-build upload anaconda [OPTIONS] --owner <OWNER>

    +
    Options:#
    +
      +
    • +

      -o, --owner <OWNER>

      +

      The owner of the distribution (e.g. conda-forge or your username)

      +
    • +
    • +

      -c, --channel <CHANNEL>

      +

      The channel / label to upload the package to (e.g. main / rc)

      +
        +
      • Default value: main
      • +
      +
    • +
    • +

      -a, --api-key <API_KEY>

      +

The Anaconda API key. If none is provided, the token is read from the keychain / auth-file

      +
    • +
    • +

      -u, --url <URL>

      +

      The URL to the Anaconda server

      +
        +
      • Default value: https://api.anaconda.org
      • +
      +
    • +
    • +

      -f, --force

      +

      Replace files on conflict

      +
        +
      • Default value: false
      • +
      • Possible values: true, false
      • +
      +
    • +
    +

    completion#

    +

    Generate shell completion script

    +

    Usage: rattler-build completion [OPTIONS]

    +
    Options:#
    +
      +
    • +

      -s, --shell <SHELL>

      +

      Shell

      +
        +
      • Possible values: bash, elvish, fish, powershell, zsh
      • +
      +
    • +
    +

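For example, to load completions into the current bash session (a common pattern; adjust for your shell):
source <(rattler-build completion --shell bash)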
    generate-recipe#

    +

    Generate a recipe from PyPI or CRAN

    +

    Usage: rattler-build generate-recipe <SOURCE> <PACKAGE>

    +
    Arguments:#
    +
      +
    • +

      <SOURCE>

      +

      Type of package to generate a recipe for

      +
        +
      • Possible values:
          +
        • pypi: + Generate a recipe for a Python package from PyPI
        • +
        • cran: + Generate a recipe for an R package from CRAN
        • +
        +
      • +
      +
    • +
    • +

      <PACKAGE>

      +

      Name of the package to generate

      +
    • +
    +

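For instance, to generate a starting-point recipe for a Python package from PyPI:
rattler-build generate-recipe pypi flask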
    auth#

    +

    Handle authentication to external channels

    +

    Usage: rattler-build auth <COMMAND>

    +
    Subcommands:#
    +
      +
    • login — Store authentication information for a given host
    • +
    • logout — Remove authentication information for a given host
    • +
    +

    login#

    +

    Store authentication information for a given host

    +

    Usage: rattler-build auth login [OPTIONS] <HOST>

    +
    Arguments:#
    +
      +
    • +

      <HOST>

      +

      The host to authenticate with (e.g. repo.prefix.dev)

      +
    • +
    +
    Options:#
    +
      +
    • +

      --token <TOKEN>

      +

      The token to use (for authentication with prefix.dev)

      +
    • +
    • +

      --username <USERNAME>

      +

      The username to use (for basic HTTP authentication)

      +
    • +
    • +

      --password <PASSWORD>

      +

      The password to use (for basic HTTP authentication)

      +
    • +
    • +

      --conda-token <CONDA_TOKEN>

      +

      The token to use on anaconda.org / quetz authentication

      +
    • +
    +

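For example (the token value is a placeholder):
rattler-build auth login repo.prefix.dev --token <YOUR_TOKEN>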
    logout#

    +

    Remove authentication information for a given host

    +

    Usage: rattler-build auth logout <HOST>

    +
    Arguments:#
    +
      +
    • +

      <HOST>

      +

      The host to remove authentication for

      +
    • +
    +
    + +

    + This document was generated automatically by + clap-markdown. +

    + + + + + + + + + + \ No newline at end of file diff --git a/dev/available_jinja/index.html b/dev/reference/jinja/index.html similarity index 90% rename from dev/available_jinja/index.html rename to dev/reference/jinja/index.html index 6dba70fd5..4176bf2e5 100644 --- a/dev/available_jinja/index.html +++ b/dev/reference/jinja/index.html @@ -8,16 +8,14 @@ - + - + - - - + @@ -26,10 +24,10 @@ - + - + @@ -48,9 +46,9 @@ - + - + @@ -63,7 +61,7 @@ - + @@ -71,7 +69,7 @@ - + @@ -79,7 +77,7 @@ - + @@ -124,7 +122,7 @@ - + + @@ -630,11 +521,11 @@
  • - + - Experimental features + Variants @@ -650,11 +541,11 @@
  • - + - Selector syntax + Compilers and cross compilation @@ -670,11 +561,11 @@
  • - + - Build script + CLI Usage @@ -690,11 +581,11 @@
  • - + - Variants + Terminal User Interface (TUI) @@ -710,11 +601,11 @@
  • - + - Compilers and cross compilation + Authentication & upload @@ -730,11 +621,11 @@
  • - + - CLI Usage + Automatic recipe linting @@ -750,11 +641,11 @@
  • - + - Terminal User Interface (TUI) + Testing packages @@ -770,11 +661,11 @@
  • - + - Authentication & upload + Reproducible builds @@ -790,11 +681,11 @@
  • - + - Automatic recipe linting + Package specification @@ -810,11 +701,11 @@
  • - + - Testing packages + Activation scripts @@ -830,11 +721,11 @@
  • - + - Reproducible builds + Internals @@ -847,14 +738,56 @@ + + + + + + + + + + + + + +
  • + + + + + + + + + +
  • @@ -1031,7 +1101,7 @@ - + @@ -1219,9 +1289,9 @@

    Default Jinja filters @@ -1240,26 +1310,6 @@

    Default Jinja filters - - - - @@ -1280,7 +1330,7 @@

    Default Jinja filters - + @@ -1335,10 +1385,10 @@

    Default Jinja filters - + - + diff --git a/dev/recipe_file/index.html b/dev/reference/recipe_file/index.html similarity index 96% rename from dev/recipe_file/index.html rename to dev/reference/recipe_file/index.html index 8eca31ef8..59b8f6bd4 100644 --- a/dev/recipe_file/index.html +++ b/dev/reference/recipe_file/index.html @@ -8,16 +8,16 @@ - + - + - + - + @@ -26,10 +26,10 @@ - + - + @@ -48,9 +48,9 @@ - + - + @@ -63,7 +63,7 @@ - + @@ -71,7 +71,7 @@ - + @@ -79,7 +79,7 @@ - + @@ -124,7 +124,7 @@ - + + @@ -842,11 +583,11 @@
  • - + - Advanced options + Terminal User Interface (TUI) @@ -862,11 +603,11 @@
  • - + - Jinja functions + Authentication & upload @@ -882,11 +623,11 @@
  • - + - Experimental features + Automatic recipe linting @@ -902,11 +643,11 @@
  • - + - Selector syntax + Testing packages @@ -922,11 +663,11 @@
  • - + - Build script + Reproducible builds @@ -942,11 +683,11 @@
  • - + - Variants + Package specification @@ -962,11 +703,11 @@
  • - + - Compilers and cross compilation + Activation scripts @@ -982,11 +723,11 @@
  • - + - CLI Usage + Internals @@ -999,114 +740,456 @@ + + + + + + + + + + + + + +
  • + + + + + + + + + - - - - -
  • - - - - - Internals - - - -
  • @@ -1535,7 +1607,7 @@ - + @@ -1614,7 +1686,7 @@

Spec reference: here.

    -

    See more in the automatic linting chapter.

    +

    See more in the automatic linting chapter.

    @@ -2614,10 +2686,10 @@

    Jinja functions - + - + diff --git a/dev/search/search_index.json b/dev/search/search_index.json index 597a3dc24..4b7782baa 100644 --- a/dev/search/search_index.json +++ b/dev/search/search_index.json @@ -1 +1 @@ -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":""},{"location":"#rattler-build-a-fast-conda-package-builder","title":"rattler-build: A Fast Conda Package Builder","text":"

    The rattler-build tooling and library creates cross-platform relocatable binaries / packages from a simple recipe format. The recipe format is heavily inspired by conda-build and boa, and the output of a regular rattler-build run is a package that can be installed using mamba, rattler or conda.

    rattler-build does not have any dependencies on conda-build or Python and works as a standalone binary.

    "},{"location":"#installation","title":"Installation","text":"

You can grab a prerelease version of rattler-build from the GitHub Releases.

    It is (of course) also available from conda-forge:

    pixi global install rattler-build\n# or with micromamba\nmicromamba install rattler-build -c conda-forge\n

    Alternatively, you can install rattler-build via Homebrew:

    brew install rattler-build\n

    rattler-build is also available on Arch Linux in the extra repository:

    pacman -S rattler-build\n
    "},{"location":"#dependencies","title":"Dependencies","text":"

Currently rattler-build needs some dependencies on the host system which are executed as subprocesses. We plan to reduce the number of external dependencies over time by writing what we need in Rust to make rattler-build fully self-contained.

    • tar to unpack tarballs downloaded from the internet in a variety of formats. .gz, .bz2 and .xz are widely used and one might have to install the compression packages as well (e.g. gzip, bzip2, ...)
    • patch to patch source code after downloading
    • install_name_tool is necessary on macOS to rewrite the rpath of shared libraries and executables to make it relative
    • patchelf is required on Linux to rewrite the rpath and runpath of shared libraries and executables
• git to check out Git repositories (not implemented yet, but will require git in the future)
    • msvc on Windows because we cannot ship the MSVC compiler on conda-forge (needs to be installed on the host machine)

    On Windows, to obtain these dependencies from conda-forge, one can install m2-patch, m2-bzip2, m2-gzip, m2-tar.

    "},{"location":"#documentation","title":"Documentation","text":"

    We have extensive documentation for rattler-build. You can find the book here.

    "},{"location":"#github-action","title":"GitHub Action","text":"

    There is a GitHub Action for rattler-build. It can be used to install rattler-build in CI/CD workflows and run a build command. Please check out the GitHub Action documentation for more information.

    "},{"location":"#usage","title":"Usage","text":"

rattler-build comes with two main commands: build and test.

    The build command takes a --recipe recipe.yaml as input and produces a package as output. The test subcommand can be used to test existing packages (tests are shipped with the package).

    "},{"location":"#the-recipe-format","title":"The recipe format","text":"

    Note You can find all examples below in the examples folder in the codebase and run them with rattler-build.

    A simple example recipe for the xtensor header-only C++ library:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n\ncontext:\n  name: xtensor\n  version: 0.24.6\n\npackage:\n  name: ${{ name|lower }}\n  version: ${{ version }}\n\nsource:\n  url: https://github.com/xtensor-stack/xtensor/archive/${{ version }}.tar.gz\n  sha256: f87259b51aabafdd1183947747edfff4cff75d55375334f2e81cee6dc68ef655\n\nbuild:\n  number: 0\n  script:\n    - if: win\n      then: |\n        cmake -G \"NMake Makefiles\" -D BUILD_TESTS=OFF -D CMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% %SRC_DIR%\n        nmake\n        nmake install\n      else: |\n        cmake ${CMAKE_ARGS} -DBUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX=$PREFIX $SRC_DIR -DCMAKE_INSTALL_LIBDIR=lib\n        make install\n\nrequirements:\n  build:\n    - ${{ compiler('cxx') }}\n    - cmake\n    - if: unix\n      then: make\n  host:\n    - xtl >=0.7,<0.8\n  run:\n    - xtl >=0.7,<0.8\n  run_constraints:\n    - xsimd >=8.0.3,<10\n\ntests:\n  - script:\n    - if: unix or emscripten\n      then:\n        - test -d ${PREFIX}/include/xtensor\n        - test -f ${PREFIX}/include/xtensor/xarray.hpp\n        - test -f ${PREFIX}/share/cmake/xtensor/xtensorConfig.cmake\n        - test -f ${PREFIX}/share/cmake/xtensor/xtensorConfigVersion.cmake\n    - if: win\n      then:\n        - if not exist %LIBRARY_PREFIX%\\include\\xtensor\\xarray.hpp (exit 1)\n        - if not exist %LIBRARY_PREFIX%\\share\\cmake\\xtensor\\xtensorConfig.cmake (exit 1)\n        - if not exist %LIBRARY_PREFIX%\\share\\cmake\\xtensor\\xtensorConfigVersion.cmake (exit 1)\n\nabout:\n  homepage: https://github.com/xtensor-stack/xtensor\n  license: BSD-3-Clause\n  license_file: LICENSE\n  summary: The C++ tensor algebra library\n  description: Multi dimensional arrays with broadcasting and lazy computing\n  documentation: https://xtensor.readthedocs.io\n  repository: https://github.com/xtensor-stack/xtensor\n\nextra:\n  recipe-maintainers:\n    - some-maintainer\n

    A recipe for the rich Python package (using noarch):

    context:\n  version: \"13.4.2\"\n\npackage:\n  name: \"rich\"\n  version: ${{ version }}\n\nsource:\n  - url: https://pypi.io/packages/source/r/rich/rich-${{ version }}.tar.gz\n    sha256: d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898\n\nbuild:\n  # Thanks to `noarch: python` this package works on all platforms\n  noarch: python\n  script:\n    - python -m pip install . -vv --no-deps --no-build-isolation\n\nrequirements:\n  host:\n    - pip\n    - poetry-core >=1.0.0\n    - python 3.10\n  run:\n    # sync with normalized deps from poetry-generated setup.py\n    - markdown-it-py >=2.2.0\n    - pygments >=2.13.0,<3.0.0\n    - python 3.10\n    - typing_extensions >=4.0.0,<5.0.0\n\ntests:\n  - python:\n      imports:\n        - rich\n      pip_check: true\n\nabout:\n  homepage: https://github.com/Textualize/rich\n  license: MIT\n  license_file: LICENSE\n  summary: Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal\n  description: |\n    Rich is a Python library for rich text and beautiful formatting in the terminal.\n\n    The Rich API makes it easy to add color and style to terminal output. Rich\n    can also render pretty tables, progress bars, markdown, syntax highlighted\n    source code, tracebacks, and more \u2014 out of the box.\n  documentation: https://rich.readthedocs.io\n  repository: https://github.com/Textualize/rich\n

    A recipe for the curl library:

    context:\n  version: \"8.0.1\"\n\npackage:\n  name: curl\n  version: ${{ version }}\n\nsource:\n  url: http://curl.haxx.se/download/curl-${{ version }}.tar.bz2\n  sha256: 9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf\n\nbuild:\n  number: 0\n\nrequirements:\n  build:\n    - ${{ compiler('c') }}\n    - if: win\n      then:\n        - cmake\n        - ninja\n    - if: unix\n      then:\n        - make\n        - perl\n        - pkg-config\n        - libtool\n  host:\n    - if: linux\n      then:\n        - openssl\n\nabout:\n  homepage: http://curl.haxx.se/\n  license: MIT/X derivate (http://curl.haxx.se/docs/copyright.html)\n  license_file: COPYING\n  summary: tool and library for transferring data with URL syntax\n  description: |\n    Curl is an open source command line tool and library for transferring data\n    with URL syntax. It is used in command lines or scripts to transfer data.\n  documentation: https://curl.haxx.se/docs/\n  repository: https://github.com/curl/curl\n

    For the curl library recipe, two additional script files (build.sh and build.bat) are needed.

    build.sh

    #!/bin/bash\n\n# Get an updated config.sub and config.guess\ncp $BUILD_PREFIX/share/libtool/build-aux/config.* .\n\nif [[ $target_platform =~ linux.* ]]; then\n    USESSL=\"--with-openssl=${PREFIX}\"\nelse\n    USESSL=\"--with-secure-transport\"\nfi;\n\n./configure \\\n    --prefix=${PREFIX} \\\n    --host=${HOST} \\\n    ${USESSL} \\\n    --with-ca-bundle=${PREFIX}/ssl/cacert.pem \\\n    --disable-static --enable-shared\n\nmake -j${CPU_COUNT} ${VERBOSE_AT}\nmake install\n\n# Includes man pages and other miscellaneous.\nrm -rf \"${PREFIX}/share\"\n

    build.bat

    mkdir build\n\ncmake -GNinja ^\n      -DCMAKE_BUILD_TYPE=Release ^\n      -DBUILD_SHARED_LIBS=ON ^\n      -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^\n      -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX% ^\n      -DCURL_USE_SCHANNEL=ON ^\n      -DCURL_USE_LIBSSH2=OFF ^\n      -DUSE_ZLIB=ON ^\n      -DENABLE_UNICODE=ON ^\n      %SRC_DIR%\n\nIF %ERRORLEVEL% NEQ 0 exit 1\n\nninja install --verbose\n
    "},{"location":"authentication_and_upload/","title":"Server authentication","text":""},{"location":"authentication_and_upload/#authenticating-with-a-server","title":"Authenticating with a server","text":"

    You may want to use private channels for which you need to be authenticated. To do this ephemerally you can use the RATTLER_AUTH_FILE environment variable to point to a JSON file with the following structure:

    {\n    \"*.prefix.dev\": {\n        \"BearerToken\": \"your_token\"\n    },\n    \"otherhost.com\": {\n        \"BasicHttp\": {\n            \"username\": \"your_username\",\n            \"password\": \"your_password\"\n        }\n    },\n    \"anaconda.org\": {\n        \"CondaToken\": \"your_token\"\n    }\n}\n

    The keys are the host names. You can use wildcard specifiers here (e.g. *.prefix.dev to match all subdomains of prefix.dev, such as repo.prefix.dev). This will allow you to also obtain packages from any private channels that you have access to.

    The following known authentication methods are supported:

    • BearerToken: prefix.dev
    • CondaToken: anaconda.org, quetz
    • BasicHttp: artifactory
    "},{"location":"authentication_and_upload/#uploading-packages","title":"Uploading packages","text":"

    If you want to upload packages, then rattler-build comes with a built-in upload command. There are 4 options:

    • prefix.dev: you can create public or private channels on the prefix.dev hosted server
    • anaconda.org: you can upload packages to the free anaconda.org server
    • quetz: you can host your own quetz server and upload packages to it
    • artifactory: you can upload packages to a JFrog Artifactory server

    The command is:

    rattler-build upload <server> <package_files>\n

    Note: you can also use the RATTLER_AUTH_FILE environment variable to authenticate with the server.
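    For example, a minimal sketch (the file path, channel name, and package paths are placeholders):

    export RATTLER_AUTH_FILE=$HOME/.rattler/auth.json\nrattler-build upload prefix -c my-channel output/*.conda\n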

    "},{"location":"authentication_and_upload/#prefixdev","title":"prefix.dev","text":"

    To upload to prefix.dev, you need to have an account and a token. You can create a token in the settings of your account. The token is used to authenticate the upload.

    export PREFIX_API_KEY=<your_token>\nrattler-build upload prefix -c <channel> <package_files>\n

    You can also use the --api-key=$PREFIX_API_KEY option to pass the token directly to the command. Note that you need to have created the channel on the prefix.dev website before you can upload to it.

    "},{"location":"authentication_and_upload/#quetz","title":"Quetz","text":"

    To upload to a channel on your own Quetz server, you need to pass an API key. The key is used to authenticate the upload.

    export QUETZ_API_KEY=<your_token>\nrattler-build upload quetz -u <url> -c <channel> <package_files>\n
    "},{"location":"authentication_and_upload/#artifactory","title":"Artifactory","text":"

    To upload to an Artifactory server, you need to pass a username and password. The username and password are used to authenticate the upload.

    export ARTIFACTORY_USERNAME=<your_username>\nexport ARTIFACTORY_PASSWORD=<your_password>\nrattler-build upload artifactory -u <url> -c <channel> <package_files>\n
    "},{"location":"authentication_and_upload/#anacondaorg","title":"anaconda.org","text":"

    To upload to anaconda.org, you need to specify the owner and API key. The API key is used to authenticate the upload.

    The owner is the owner of the distribution, for example, your user name or organization.

    One can also specify a label such as dev for release candidates using the -c flag. The default value is main.

    You can also add the --force argument to forcibly upload a new package (and overwrite any existing ones).

    export ANACONDA_API_KEY=<your_token>\nrattler-build upload anaconda -o <your_username> -c <label> <package_files>\n
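    Combining the label and --force options described above, a sketch with placeholder values:

    export ANACONDA_API_KEY=<your_token>\nrattler-build upload anaconda -o my-org -c dev --force output/*.conda\n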
    "},{"location":"automatic_linting/","title":"Automatic linting in VSCode","text":"

    The new recipe format comes with a strict JSON schema. You can find the schema in this repository.

    It is implemented with pydantic and renders to a JSON schema file. The YAML language server extension in VSCode can recognize the schema and give helpful hints during editing.

    With the YAML language server installed, the automatic linting can be enabled by adding the following line to the top of the recipe file:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n
    "},{"location":"available_jinja/","title":"Jinja functions that can be used in the recipe","text":"

    rattler-build comes with a couple of useful helpers that can be used in the recipe.

    "},{"location":"available_jinja/#functions","title":"Functions","text":""},{"location":"available_jinja/#the-compiler-function","title":"The compiler function","text":"

    The compiler function can be used to put together a compiler that works for the current platform and the compilation \"target_platform\". The syntax looks like: ${{ compiler('c') }} where 'c' signifies the programming language that is used.

    This function evaluates to <compiler>_<target_platform> <compiler_version>. For example, when building on linux for the target platform linux-64, this function evaluates to gcc_linux-64.

    The values can be influenced by the variant_configuration: the <lang>_compiler and <lang>_compiler_version variant keys determine the compiler and its version. See below for an example:

    "},{"location":"available_jinja/#usage-in-a-recipe","title":"Usage in a recipe","text":"recipe.yaml
    requirements:\n  build:\n    - ${{ compiler('c') }}\n

    With a corresponding variant_configuration:

    variant_configuration.yaml
    c_compiler:\n- clang\nc_compiler_version:\n- 9.0\n

    The variables shown above would select the clang compiler in version 9.0. Note that the final output will still contain the target_platform, so that the full compiler will read clang_linux-64 9.0 when compiling with --target-platform linux-64.

    rattler-build defines some default compilers for the following languages (inherited from conda-build):

    • c: gcc on Linux, clang on osx and vs2017 on Windows
    • cxx: gxx on Linux, clangxx on osx and vs2017 on Windows
    • fortran: gfortran on Linux, gfortran on osx and vs2017 on Windows
    • rust: rust
    "},{"location":"available_jinja/#the-stdlib-function","title":"The stdlib function","text":"

    The stdlib function closely mirrors the compiler function. It can be used to put together a standard library that works for the current platform and the compilation \"target_platform\".

    Usage: ${{ stdlib('c') }}

    Results in <stdlib>_<target_platform> <stdlib_version>. And uses the variant variables <lang>_stdlib and <lang>_stdlib_version to influence the output.

    "},{"location":"available_jinja/#usage-in-a-recipe_1","title":"Usage in a recipe:","text":"recipe.yaml
    requirements:\n  build:\n    # these are usually paired!\n    - ${{ compiler('c') }}\n    - ${{ stdlib('c') }}\n

    With a corresponding variant_configuration:

    variant_configuration.yaml
    # these are the values `conda-forge` uses in their pinning file\n# found at https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/main/recipe/conda_build_config.yaml\nc_stdlib:\n- sysroot\nc_stdlib_version:\n- 2.17\n
    "},{"location":"available_jinja/#the-pin-functions","title":"The pin functions","text":"

    A pin is created based on the version input (from a subpackage or a package resolution).

    The pin functions take five arguments in total; the first two control how the bounds are derived from the version:

    • min_pin (default: \"x.x.x.x.x.x\"): The minimum pin to be used. When set to None, no lower bound is set.
    • max_pin (default: \"x\"): The maximum pin to be used. When set to None, no upper bound is set.

    These \"pins\" are applied to the version input to create the lower and upper bounds. For example, if the version is 3.10.5 with min_pin=\"x.x\", max_pin=\"x.x.x\", the lower bound will be 3.10 and the upper bound will be 3.10.6.0a0. The max_pin will increment the last selected segment of the version by 1, and append .0a0 to the end to prevent any alpha versions from being selected.

    If the last segment of the version contains a letter (e.g. 9e or 1.1.1j), then incrementing the version sets that letter to a: 9e becomes 10a, and 1.1.1j becomes 1.1.2a. In this case, no .0a0 suffix is appended.

    Sometimes you want to strongly connect your outputs. This can be achieved with the following input:

    • exact=True (default: False): This will pin the version exactly to the version of the output, incl. the build string.

    To override the lower or upper bound with a hard-coded value, you can use the following input:

    • lower_bound (default: None): This will override the lower bound with the given value.
    • upper_bound (default: None): This will override the upper bound with the given value.

    Both lower_bound and upper_bound expect a valid version string (e.g. 1.2.3).

    "},{"location":"available_jinja/#the-pin_subpackage-function","title":"The pin_subpackage function","text":"
    • ${{ pin_subpackage(\"mypkg\", min_pin=\"x.x\", max_pin=\"x.x\") }} creates a pin to another output in the recipe. With an input of 3.1.5, this would create a pin of mypkg >=3.1,<3.2.0a0.
    • ${{ pin_subpackage(\"other_output\", exact=True) }} creates a pin to another output in the recipe with an exact version.
    • ${{ pin_subpackage(\"other_output\", lower_bound=\"1.2.3\", upper_bound=\"1.2.4\") }} creates a pin to another output in the recipe with a lower bound of 1.2.3 and an upper bound of 1.2.4. This is equivalent to writing other_output >=1.2.3,<1.2.4.
    "},{"location":"available_jinja/#the-pin_compatible-function","title":"The pin_compatible function","text":"

    The pin compatible function works exactly as the pin_subpackage function, but it pins the package in the run requirements based on the resolved package of the host or build section.

    "},{"location":"available_jinja/#the-cdt-function","title":"The cdt function","text":"
    • ${{ cdt("mypkg") }} creates a dependency on a Core Dependency Tree (CDT) package for the current architecture.

    This function helps add Core Dependency Tree packages as dependencies by converting packages as required according to hard-coded logic. See below for an example of how this function can be used:

    # on x86_64 system\ncdt('package-name') # outputs: package-name-cos6-x86_64\n# on aarch64 system\ncdt('package-name') # outputs: package-name-cos6-aarch64\n
    "},{"location":"available_jinja/#the-hash-variable","title":"The hash variable","text":"
    • ${{ hash }} is the variant hash and is useful in the build string computation.
    "},{"location":"available_jinja/#the-version_to_buildstring-function","title":"The version_to_buildstring function","text":"
    • ${{ python | version_to_buildstring }} converts a version from the variant to a build string (it removes the . character and takes only the first two elements of the version).
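    As a sketch: if the python variant is "3.11.4", the filter yields 311. A hypothetical build string assembled from these variables could look as follows (the exact composition is up to the recipe author):

    build:\n  number: 0\n  string: py${{ python | version_to_buildstring }}h${{ hash }}_0\n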
    "},{"location":"available_jinja/#the-env-object","title":"The env object","text":"

    You can use the env object to retrieve environment variables and forward them to your build script. There are two ways to do this:

    • ${{ env.get(\"MY_ENV_VAR\") }} will return the value of the environment variable MY_ENV_VAR or throw an error if it is not set.
    • ${{ env.get_default(\"MY_ENV_VAR\", \"default_value\") }} will return the value of the environment variable MY_ENV_VAR or \"default_value\" if it is not set.

    You can also check for the existence of an environment variable:

    • ${{ env.exists(\"MY_ENV_VAR\") }} will return true if the environment variable MY_ENV_VAR is set and false otherwise.
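    For instance, a minimal sketch that forwards a hypothetical environment variable into the context section, with a fallback default when it is unset:

    context:\n  # MY_PKG_VERSION is a hypothetical variable set by the surrounding CI\n  custom_version: ${{ env.get_default(\"MY_PKG_VERSION\", \"0.1.0\") }}\n\npackage:\n  name: mypackage\n  version: ${{ custom_version }}\n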
    "},{"location":"available_jinja/#default-jinja-filters","title":"Default Jinja filters","text":"

    The following Jinja filters are available: lower, upper, and indexing into characters (e.g. https://myurl.com/${{ name[0] }}/${{ name | lower }}_${{ version }}.tar.gz).

    See the minijinja documentation for a list of all available built-in filters.

    "},{"location":"build_options/","title":"Advanced build options","text":"

    There are some specialized build options to control various features:

    • prefix replacement
    • variant configuration
    • encoded file type

    These are all found under the build key in the recipe.yaml.

    "},{"location":"build_options/#always-include-and-always-copy-files","title":"Always include and always copy files","text":"

    There are some options that control the inclusion of files in the final package.

    The always_include_files option can be used to include files even if they are already in the environment as part of some other host dependency. This is normally \"clobbering\" and should be used with caution (since packages should not have any overlapping files).

    The always_copy_files option can be used to copy files instead of linking them. This is useful for files that might be modified inside the environment (e.g. configuration files). Normally, files are linked from a central cache into the environment to save space \u2013 that means that files modified in one environment will be modified in all environments. This is not always desirable, and in that case you can use the always_copy_files option.

    How always_copy_files works

    The always_copy_files option works by setting the no_link option in the info/paths.json to true for the files in question. This means that the files are copied instead of linked when the package is installed.

    recipe.yaml
    build:\n  # include files even if they are already in the environment\n  # as part of some other host dependency\n  always_include_files: list of globs\n\n  # do not soft- or hard-link these files, but always copy them was `no_link`\n  always_copy_files: list of globs\n

    Glob patterns

    Glob patterns are used throughout the build options to specify files. The patterns are matched against the relative path of the file in the build directory. Patterns can contain * to match any number of characters, ? to match a single character, and ** to match any number of directories.

    For example:

    • *.txt matches all files ending in .txt
    • **/*.txt matches all files ending in .txt in any directory
    • **/test_*.txt matches all files starting with test_ and ending in .txt in any directory
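    For instance, these glob patterns can be plugged into the options above; a minimal sketch with hypothetical paths:

    build:\n  # hypothetical data files to include even if another host\n  # dependency already provides them\n  always_include_files:\n    - share/mypackage/data/**\n  # hypothetical configuration files to copy instead of link\n  always_copy_files:\n    - etc/mypackage/*.conf\n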
    "},{"location":"build_options/#merge-build-and-host-environments","title":"Merge build and host environments","text":"

    In very rare cases you might want to merge the build and host environments to obtain the \"legacy\" behavior of conda-build.

    recipe.yaml
    build:\n  # merge the build and host environments (used in many R packages on Windows)\n  merge_build_and_host_envs: bool (defaults to false)\n
    "},{"location":"build_options/#prefix-detection-replacement-options","title":"Prefix detection / replacement options","text":"

    At installation time, the \"install\" prefix is injected into text and binary files. Sometimes this is not desired, and sometimes the user might want closer control over the automatic text/binary detection.

    The main difference between prefix replacement for text and binary files is that for binary files, the prefix string is padded with null bytes to match the length of the original prefix. The original prefix is the very long placeholder string that you might have seen in the build process.

    On Windows, binary prefix replacement is never performed.

    recipe.yaml
    package:\n  name: mypackage\n  version: 1.0\n\nbuild:\n  # settings concerning the prefix detection in files\n  prefix_detection:\n    # force the file type of the given files to be TEXT or BINARY\n    # for prefix replacement\n    force_file_type:\n      # force TEXT file type (list of globs)\n      text: list of globs\n      # force binary file type (list of globs)\n      binary: list of globs\n\n    # ignore all or specific files for prefix replacement\n    ignore: bool | [path] (defaults to false)\n\n    # whether to detect binary files with prefix or not\n    # defaults to true on Unix and (always) false on Windows\n    ignore_binary_files: bool\n
    "},{"location":"build_options/#variant-configuration","title":"Variant configuration","text":"

    To control the variant precisely you can use the \"variant configuration\" options.

    A variant package has the same version number, but a different \"hash\" and potentially different dependencies or build options. Variant keys are extracted from the variant_config.yaml file; usually any Jinja variables used in the recipe, as well as dependencies without a version specifier, become variant keys.

    Variant keys can also be forcibly set or ignored with the use_keys and ignore_keys options.

    In order to decide which of the variant packages to prefer and install by default, the down_prioritize_variant option can be used. The higher the value, the less preferred the variant is.

    More about variants can be found in the variant documentation.

    The following options are available in the build section to control the variant configuration:

    recipe.yaml
    build:\n  # settings for the variant\n  variant:\n    # Keys to forcibly use for the variant computation\n    # even if they are not in the dependencies\n    use_keys: list of strings\n\n    # Keys to forcibly ignore for the variant computation\n    # even if they are in the dependencies\n    ignore_keys: list of strings\n\n    # used to prefer this variant less\n    down_prioritize_variant: integer (defaults to 0, higher is less preferred)\n
    "},{"location":"build_options/#dynamic-linking-configuration","title":"Dynamic linking configuration","text":"

    After the package is built, rattler-build performs some \"post-processing\" on the binaries and libraries.

    This entails making the shared libraries relocatable and checking that all linked libraries are present in the run requirements. The following settings control this behavior.

    With the rpaths option you can forcibly set the rpath of the shared libraries. The paths are relative to the install prefix. Any rpath setting is ignored on Windows.

    The rpath_allowlist option can be used to allow the rpath to point to locations outside of the environment. This is useful if you want to link against libraries that are not part of the conda environment (e.g. proprietary software).

    If you want to stop rattler-build from relocating the binaries, you can set binary_relocation to false. If you want to only relocate some binaries, you can select the relevant ones with a glob pattern.

    To read more about rpaths and how rattler-build creates relocatable binary packages, see the internals docs.

    If you link against some libraries (possibly even outside of the prefix, in a system location), then you can use the missing_dso_allowlist to allow linking against these and suppress any warnings. This list is pre-populated with a list of known system libraries on the different operating systems.

    As part of the post-processing, rattler-build checks for overlinking and overdepending. \"Overlinking\" is when a binary links against a library that is not specified in the run requirements. This is usually a mistake because the library would not be present in the environment when the package is installed.

    Conversely, \"overdepending\" is when a library is part of the run requirements, but is not actually used by any of the binaries/libraries in the package.

    recipe.yaml
    build:\n  # settings for shared libraries and executables\n  dynamic_linking:\n    # linux only, list of rpaths relative to the installation prefix\n    rpaths: list of paths (defaults to ['lib/'])\n\n    # Allow runpath / rpath to point to these locations\n    # outside of the environment\n    rpath_allowlist: list of globs\n\n    # whether to relocate binaries or not. If this is a list of paths, then\n    # only the listed paths are relocated\n    binary_relocation: bool (defaults to true) | list of globs\n\n    # Allow linking against libraries that are not in the run requirements\n    missing_dso_allowlist: list of globs\n\n    # what to do when detecting overdepending\n    overdepending_behavior: \"ignore\" or \"error\" # (defaults to \"error\")\n\n    # what to do when detecting overlinking\n    overlinking_behavior: \"ignore\" or \"error\" # (defaults to \"error\")\n
    "},{"location":"build_script/","title":"Build scripts","text":"

    The build.sh file is the build script for Linux and macOS and build.bat is the build script for Windows. These scripts contain the logic that carries out your build steps. Anything that your build script copies into the $PREFIX or %PREFIX% folder will be included in your output package.

    For example, this build.sh installs a script from the recipe directory into $PREFIX/bin:

    mkdir -p $PREFIX/bin\ncp $RECIPE_DIR/my_script_with_recipe.sh $PREFIX/bin/super-cool-script.sh\n

    There are many environment variables defined for you to use in build.sh and build.bat. Please see environment variables for more information.

    build.sh and build.bat are optional. You can instead use the build/script key in your recipe.yaml, with each value being either a string command or a list of string commands. Any commands you put there must be able to run on every platform for which you build. For example, you can't use the cp command because cmd.exe won't understand it on Windows.
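    For example, a minimal sketch of an inline script in recipe.yaml (the command is illustrative; it must work on every platform you build for):

    build:\n  script:\n    - python -m pip install . -vv --no-deps --no-build-isolation\n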

    build.sh is run with bash and build.bat is run with cmd.exe.

    "},{"location":"build_script/#environment-variables","title":"Environment variables","text":""},{"location":"build_script/#environment-variables-set-during-the-build-process","title":"Environment variables set during the build process","text":"

    During the build process, the following environment variables are set, on Windows with build.bat and on macOS and Linux with build.sh. By default, these are the only variables available to your build script. Unless otherwise noted, no variables are inherited from the shell environment in which you invoke rattler-build.

    • ARCH: Either 32 or 64, to specify whether the build is 32-bit or 64-bit. The value depends on the ARCH environment variable and defaults to the architecture the interpreter running conda was compiled with.
    • CMAKE_GENERATOR: The CMake generator string for the current build environment. On Linux systems, this is always Unix Makefiles. On Windows, it is generated according to the Visual Studio version activated at build time, for example, Visual Studio 9 2008 Win64.
    • CONDA_BUILD=1: Always set to indicate that the conda-build process is running.
    • CPU_COUNT: Represents the number of CPUs on the system.
    • SHLIB_EXT: Denotes the shared library extension specific to the operating system (e.g. .so for Linux, .dylib for macOS, and .dll for Windows).
    • HTTP_PROXY: Inherited from the user's shell environment, specifying the HTTP proxy settings.
    • HTTPS_PROXY: Similar to HTTP_PROXY, this is inherited from the user's shell environment and specifies the HTTPS proxy settings.
    • LANG: Inherited from the user's shell environment, defining the system language and locale settings.
    • MAKEFLAGS: Inherited from the user's shell environment. This can be used to set additional arguments for the make command, such as -j2 to utilize 2 CPU cores for building the recipe.
    • PY_VER: Specifies the Python version against which the build is occurring. This can be modified with a variant_config.yaml file.
    • PATH: Inherited from the user's shell environment and augmented with the activated host and build prefixes.
    • PREFIX: The build prefix to which the build script should install the software.
    • PKG_BUILDNUM: Indicates the build number of the package currently being built.
    • PKG_NAME: The name of the package that is being built.
    • PKG_VERSION: The version of the package currently being built.
    • PKG_BUILD_STRING: The complete build string of the package being built, including the hash (e.g. py311h21422ab_0).
    • PKG_HASH: Represents the hash of the package being built, excluding the leading 'h' (e.g. 21422ab). This is applicable from conda-build 3.0 onwards.
    • PYTHON: The path to the Python executable in the host prefix. Python is installed in the host prefix only when it is listed as a host requirement.
    • R: The path to the R executable in the build prefix. R is installed in the build prefix only when it is listed as a build requirement.
    • RECIPE_DIR: The directory where the recipe is located.
    • SP_DIR: The location of Python's site-packages, where Python libraries are installed.
    • SRC_DIR: The path to where the source code is unpacked or cloned. If the source file is not a recognized archive format, this directory contains a copy of the source file.
    • STDLIB_DIR: The location of Python's standard library.
    • build_platform: Represents the native subdirectory of the conda executable, indicating the platform on which the build is occurring.

    The following variables from conda-build have been removed:

    • NPY_VER
    • PY3K

    "},{"location":"build_script/#windows","title":"Windows","text":"

    Unix-style packages on Windows are built in a special Library directory under the build prefix. The environment variables listed in the following table are defined only on Windows.

    • LIBRARY_BIN: <build prefix>\Library\bin
    • LIBRARY_INC: <build prefix>\Library\include
    • LIBRARY_LIB: <build prefix>\Library\lib
    • LIBRARY_PREFIX: <build prefix>\Library
    • SCRIPTS: <build prefix>\Scripts

    Not yet supported in rattler-build:

    • CYGWIN_PREFIX
    • VS_MAJOR
    • VS_VERSION
    • VS_YEAR

    Additionally, the following variables are forwarded from the environment:

    • ALLUSERSPROFILE
    • APPDATA
    • CommonProgramFiles
    • CommonProgramFiles(x86)
    • CommonProgramW6432
    • COMPUTERNAME
    • ComSpec
    • HOMEDRIVE
    • HOMEPATH
    • LOCALAPPDATA
    • LOGONSERVER
    • NUMBER_OF_PROCESSORS
    • PATHEXT
    • ProgramData
    • ProgramFiles
    • ProgramFiles(x86)
    • ProgramW6432
    • PROMPT
    • PSModulePath
    • PUBLIC
    • SystemDrive
    • SystemRoot
    • TEMP
    • TMP
    • USERDOMAIN
    • USERNAME
    • USERPROFILE
    • windir
    • PROCESSOR_ARCHITEW6432
    • PROCESSOR_ARCHITECTURE
    • PROCESSOR_IDENTIFIER
    "},{"location":"build_script/#unix","title":"Unix","text":"

    The environment variables listed in the following table are defined only on macOS and Linux.

    • HOME: Standard $HOME environment variable.
    • PKG_CONFIG_PATH: Path to the pkgconfig directory, defaults to $PREFIX/lib/pkgconfig.
    • SSL_CERT_FILE: Path to the SSL certificate file.
    • CFLAGS: Empty; can be forwarded from the environment to set additional arguments for the C compiler.
    • CXXFLAGS: Same as CFLAGS, for the C++ compiler.
    • LDFLAGS: Empty; additional flags to be passed to the linker when linking object files into an executable or shared object.
    "},{"location":"build_script/#macos","title":"macOS","text":"

    The environment variables listed in the following table are defined only on macOS.

    • MACOSX_DEPLOYMENT_TARGET: Same as the Anaconda Python macOS deployment target. Currently 10.9 for Intel 32- and 64-bit macOS, and 11.0 for arm64.
    • OSX_ARCH: i386, x86_64, or arm64, depending on the target platform.
    "},{"location":"build_script/#linux","title":"Linux","text":"

    The environment variables listed below are defined only on Linux.

    • LD_RUN_PATH: Defaults to <build prefix>/lib.
    • QEMU_LD_PREFIX: The prefix used by QEMU's user mode emulation for library paths.
    • QEMU_UNAME: Set qemu uname release string to 'uname'.
    • DEJAGNU: The path to the dejagnu testing framework used by the GCC test suite.
    • DISPLAY: The X11 display to use for graphical applications.
    • BUILD: Target triple ({build_arch}-conda_{build_distro}-linux-gnu) where build_distro is one of cos6 or cos7, for CentOS 6 or 7.
    "},{"location":"cli_usage/","title":"CLI usage","text":""},{"location":"cli_usage/#shell-completions","title":"Shell Completions","text":"

    We support shell completions through clap_complete. You can generate them for your shell using the completion command.

    You can add the completions to your shell by adding the following to your shell's configuration file:

    # For bash (add this to ~/.bashrc)\neval \"$(rattler-build completion --shell=bash)\"\n# For zsh (add this to ~/.zshrc)\neval \"$(rattler-build completion --shell=zsh)\"\n# For fish (add this to ~/.config/fish/config.fish)\nrattler-build completion --shell=fish | source\n

    Ensure that the location where rattler-build is installed is on your PATH; after that, you can use TAB (or any configured completion key) to complete rattler-build commands.

    $ rattler-build <TAB>\nbuild    -- Build a package\nhelp     -- Print this message or the help of the given subcommand(s)\nrebuild  -- Rebuild a package\ntest     -- Test a package\n

    Package format

    You can specify the package format (either .tar.bz2 or .conda) by using the --package-format flag. You can also set the compression level with :<level> after the package format. The <level> can be max, min, default, or a number corresponding to the compression level. .tar.bz2 supports compression levels between 1 and 9, while .conda supports compression levels between -7 and 22. For .conda, you can also set the --compression-threads flag to specify the number of threads to use for compression.

    # default\nrattler-build build --package-format tarbz2 -r recipe/recipe.yaml\n# maximum compression with 10 threads\nrattler-build build --package-format conda:max --compression-threads 10 -r recipe/recipe.yaml\n
    "},{"location":"cli_usage/#logs","title":"Logs","text":"

    rattler-build supports three log styles: fancy, plain, and json. You can configure them with the --log-style=<style> flag:

    # default\nrattler-build build --log-style fancy -r recipe/recipe.yaml\n
    "},{"location":"cli_usage/#github-integration","title":"GitHub integration","text":"

    rattler-build also has a GitHub integration. With this integration, warnings are automatically emitted in the GitHub Actions log and a summary is generated and posted to the GitHub Actions summary page.

    To make use of this integration, we recommend using our custom GitHub action: rattler-build-action. To manually enable it, you can set the environment variable RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION=true.
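    As a sketch, a GitHub Actions step that enables the integration manually might look like this (it assumes rattler-build is already installed on the runner and the recipe path is a placeholder):

    - name: Build package\n  run: rattler-build build --recipe recipe/recipe.yaml\n  env:\n    RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION: \"true\"\n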

    "},{"location":"compilers/","title":"Compilers and cross-compilation","text":"

    To use a compiler in your project, it's best to use the ${{ compiler('lang') }} template function. The compiler function works by taking a language, determining the configured compiler for that language, and adding some information about the target platform to the selected compiler. To configure a compiler for a specific language, the variant_config.yaml file can be used.

    For example, in a recipe that uses a C-compiler, you can use the following code:

    requirements:\n  build:\n    - ${{ compiler('c') }}\n

    To set the compiler that you want to use, create a variant config that looks like the following:

    c_compiler:\n  - gcc\n\n# optionally you can specify a version\nc_compiler_version:\n  - 9.3.0\n

    When the template function is evaluated, it will resolve to something like: gcc_linux-64 9.3.0. You can also define your own compilers: for example, for Rust you can use ${{ compiler('rust') }} and set rust_compiler (and, optionally, rust_compiler_version) in your variant config.
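    For instance, a hedged sketch of such a variant configuration (the package name and version are illustrative):

    rust_compiler:\n  - rust\nrust_compiler_version:\n  - \"1.70\"\n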

    "},{"location":"compilers/#cross-compilation","title":"Cross-compilation","text":"

    Cross-compilation is supported by rattler-build, and the compiler template function is part of what makes it possible. When you want to cross-compile from linux-64 to linux-aarch64 (i.e. from Intel to ARM), you can pass --target-platform linux-aarch64 to the rattler-build command. This will cause the compiler template function to select a compiler that is configured for linux-aarch64. The above example would resolve to gcc_linux-aarch64 9.3.0. Provided that the compiler package is available for linux-64 (your build platform), the compilation should succeed.

    The distinction between the build and host sections begins to make sense when thinking about cross-compilation. The build environment is resolved to packages that need to run at compilation time. For example, cmake, gcc, and autotools are all tools that need to be executed. Therefore, the build environment resolves to packages for the linux-64 architecture (in our example). On the other hand, the host packages resolve to linux-aarch64 - those are packages that we want to link against.

    # packages that need to run at build time (cmake, gcc, autotools, etc.)\n# in the platform that rattler-build is executed on (the build_platform)\nbuild:\n  - cmake\n  - ${{ compiler('c') }}\n# packages that we want to link against in the architecture we are\n# cross-compiling to the target_platform\nhost:\n  - libcurl\n  - openssl\n
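    A cross-compiling build is then started by passing the target platform on the command line:

    rattler-build build --recipe myrecipe/recipe.yaml --target-platform linux-aarch64\n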
    "},{"location":"experimental_features/","title":"Experimental features","text":"

    Warning

    These are experimental features of rattler-build and may change or go away completely.

    Currently only the build and rebuild commands support the following experimental features.

    To enable them, use the --experimental flag with the command, or set the environment variable RATTLER_BUILD_EXPERIMENTAL=1.
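    For example:

    # via the CLI flag\nrattler-build build --experimental --recipe myrecipe/recipe.yaml\n# or via the environment variable\nRATTLER_BUILD_EXPERIMENTAL=1 rattler-build build --recipe myrecipe/recipe.yaml\n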

    "},{"location":"experimental_features/#jinja-functions","title":"Jinja functions","text":""},{"location":"experimental_features/#load_from_filefile_path","title":"load_from_file(<file_path>)","text":"

    The Jinja function load_from_file reads a file at render time. toml, json, and yaml files are parsed into an object, so values can be fetched directly from the file's structure; all other file types are loaded as plain strings.

    "},{"location":"experimental_features/#usage","title":"Usage","text":"

    load_from_file is useful when a project already describes itself in a well-defined file such as Cargo.toml, package.json, pyproject.toml, package.yaml, or stack.yaml. It keeps the recipe as simple as possible and avoids duplicating information that would otherwise need to be kept in sync manually; typical use cases are CI/CD infrastructure or projects with a well-defined output format.

    Below is an example loading a Cargo.toml inside of the rattler-build GitHub repository:

    recipe.yaml
    context:\n  name: ${{ load_from_file(\"Cargo.toml\").package.name }}\n  version: ${{ load_from_file(\"Cargo.toml\").package.version }}\n  source_url: ${{ load_from_file(\"Cargo.toml\").package.homepage }}\n  rust_toolchain: ${{ load_from_file(\"rust-toolchains\") }}\n\npackage:\n  name: ${{ name }}\n  version: ${{ version }}\n\nsource:\n  git: ${{ source_url }}\n  tag: ${{ source_tag }}\n\nrequirements:\n  build:\n    - rust ==${{ rust_toolchain }}\n\nbuild:\n  script: cargo build --release -p ${{ name }}\n\ntest:\n  - script: cargo test -p ${{ name }}\n  - script: cargo test -p rust-test -- --test-threads=1\n\nabout:\n  home: ${{ source_url }}\n  repository: ${{ source_url }}\n  documentation: ${{ load_from_file(\"Cargo.toml\").package.documentation }}\n  summary: ${{ load_from_file(\"Cargo.toml\").package.description }}\n  license: ${{ load_from_file(\"Cargo.toml\").package.license }}\n
    "},{"location":"experimental_features/#git-functions","title":"git functions","text":"

    git functions are useful for getting the latest tag and commit hash. These can be used in the context section of the recipe, to fetch version information from a repository.

    Examples
    # latest tag in the repo\ngit.latest_tag(<git_repo_url>)\n\n# latest tag revision (i.e. the hash of the tag's commit) in the repo\ngit.latest_tag_rev(<git_repo_url>)\n\n# latest commit revision (i.e. the hash of the head commit) in the repo\ngit.head_rev(<git_repo_url>)\n
    "},{"location":"experimental_features/#usage_1","title":"Usage","text":"

    These can be useful for automating minor things inside the recipe itself, such as checking whether the current version is the latest tag, or whether the current commit is the repository's head.

    recipe.yaml
    context:\n  git_repo_url: \"https://github.com/prefix-dev/rattler-build\"\n  latest_tag: ${{ git.latest_tag( git_repo_url ) }}\n\npackage:\n  name: \"rattler-build\"\n  version: ${{ latest_tag }}\n\nsource:\n  git: ${{ git_repo_url }}\n  tag: ${{ latest_tag }}\n

    There is currently no guarantee that repository fetches are cached when using the git functions, which may lead to some performance issues.

    "},{"location":"highlevel/","title":"What is rattler-build?","text":"

    rattler-build is a tool to build and package software so that it can be installed on any operating system \u2013 with any compatible package manager such as mamba, conda, or rattler. We are also intending for rattler-build to be used as a library to drive builds of packages from any other recipe format in the future.

    "},{"location":"highlevel/#how-does-rattler-build-work","title":"How does rattler-build work?","text":"

    Building of packages consists of several steps. It all begins with a recipe.yaml file that specifies how the package is to be built and what the dependencies are. From the recipe file, rattler-build executes several steps:

    1. Parse the recipe file and evaluate conditional parts (we will see that later, but parts of the recipe can be conditional, e.g. on Windows vs. macOS)
    2. Retrieve all source files specified in the recipe, such as .tar.gz files, git repositories or even local paths. Additionally, this step will apply patches that can be specified alongside the source file.
    3. Download and install dependencies into temporary \"host\" and \"build\" workspaces. Any dependencies that are needed at build time are installed in this step.
    4. Execute the build script to build/compile the source code and \"install\" it into the host environment.
    5. Collect all files that are new in the \"host\" environment (because the build script just created them) and apply some transformations if necessary; specifically, we edit the rpath on Linux and macOS to help make binaries relocatable.
    6. Bundle all the files in a package and write out any additional metadata into the info/index.json, info/about.json, and info/paths.json files. This also creates the test files that are bundled with the package.
    7. If any tests are specified in the recipe, then those tests are run. The package is considered \"done\" only if it passes all of the tests; otherwise it is moved to a \"broken\" location.

    After this process, a package is created. The package can then be uploaded, for example to a private or public channel on prefix.dev.

    "},{"location":"highlevel/#how-to-run-rattler-build","title":"How to run rattler-build","text":"

    Running rattler-build is straightforward. It can be done on the command line:

    rattler-build build --recipe myrecipe/recipe.yaml\n

    A custom channel that is not conda-forge (the default) can be specified like so:

    rattler-build build -c robostack --recipe myrecipe/recipe.yaml\n

    You can also use the --recipe-dir argument if you want to build all the packages in a directory:

    rattler-build build --recipe-dir myrecipes/\n
    "},{"location":"highlevel/#overview-of-a-recipeyaml","title":"Overview of a recipe.yaml","text":"

    A recipe.yaml file is separated into multiple sections and can conditionally include or exclude sections. Recipe files also support a limited amount of string interpolation with Jinja (specifically minijinja in our case).

    A simple example of a recipe file for the zlib package would look as follows:

    recipe.yaml
    # variables from the context section can be used in the rest of the recipe\n# in jinja expressions\ncontext:\n  version: 1.2.13\n\npackage:\n  name: zlib\n  version: ${{ version }}\n\nsource:\n  url: http://zlib.net/zlib-${{ version }}.tar.gz\n  sha256: b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30\n\nbuild:\n  # build numbers can be set arbitrarily\n  number: 0\n  script:\n    # build script to install the package into the $PREFIX (host prefix)\n    - if: unix\n      then:\n      - ./configure --prefix=$PREFIX\n      - make -j$CPU_COUNT\n    - if: win\n      then:\n      - cmake -G \"Ninja\" -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX%\n      - ninja install\n\nrequirements:\n  build:\n    # compiler is a special function.\n    - ${{ compiler(\"c\") }}\n    # The following two dependencies are only needed on Windows,\n    # and thus conditionally selected\n    - if: win\n      then:\n        - cmake\n        - ninja\n    - if: unix\n      then:\n        - make\n

    The sections of a recipe are:

    sections description context Defines variables that can be used in the Jinja context later in the recipe (e.g. name and version are commonly interpolated in strings) package This section defines the name and version of the package you are currently building and will be the name of the final output source Defines where the source code is going to be downloaded from and checksums build Settings for the build and the build script requirements Allows the definition of build, host, run and run-constrained dependencies"},{"location":"internals/","title":"Internals of rattler-build","text":""},{"location":"internals/#making-packages-relocatable-with-rattler-build","title":"Making Packages Relocatable with rattler-build","text":"

    Often, the most challenging aspect of building a package using rattler-build is making it relocatable. A relocatable package can be installed into any prefix, allowing it to be used outside the environment in which it was built. This is in contrast to a non-relocatable package, which can only be utilized within its original build environment.

    rattler-build automatically performs the following actions to make packages relocatable:

    1. Binary object file conversion: Binary object files are converted to use relative paths using install_name_tool on macOS and patchelf on Linux. This uses $ORIGIN for elf files on Linux and @loader_path for Mach-O files on macOS to make the rpath relative to the executable / shared library.
    2. Text file prefix registration: Any text file (i.e. any file without NULL bytes) that contains the placeholder prefix has the registered prefix replaced with the install prefix.
    3. Binary file prefix detection and registration: Binary files containing the build prefix can be automatically registered. The registered files will have their build prefix replaced with the install prefix at install time. This works by padding the install prefix with null terminators, such that the length of the binary file remains the same. The build prefix must be long enough to accommodate any reasonable installation prefix. On macOS and Linux, rattler-build pads the build prefix to 255 characters by appending _placehold to the end of the build directory name.
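    To verify the first step yourself, you can inspect the rewritten rpath after a build; a minimal sketch, assuming a hypothetical library name libfoo:

    # Linux: print the rpath rewritten by patchelf (expect something like $ORIGIN/../lib)\npatchelf --print-rpath $PREFIX/lib/libfoo.so\n# macOS: list load commands, including LC_RPATH entries rewritten with install_name_tool\notool -l $PREFIX/lib/libfoo.dylib\n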
    "},{"location":"internals/#what-goes-into-a-package","title":"What goes into a package?","text":"

    Generally speaking, any new files that are copied into the $PREFIX directory at build time are part of the new package. However, there is some filtering going on to exclude unwanted files, and noarch: python packages have special handling as well. The rules are as follows:

    "},{"location":"internals/#filtering","title":"Filtering","text":""},{"location":"internals/#general-file-filtering","title":"General File Filtering","text":"

    Certain files are filtered out to prevent them from being included in the package. These include:

    • .pyo files: Optimized Python files are not included because they are considered harmful.
    • .la files: Libtool archive files that are not needed at runtime.
    • .DS_Store files: macOS-specific files that are irrelevant to the package.
    • .git files and directories: Version control files, including .gitignore and the .git directory, which are not needed in the package.
    • share/info/dir: This file is ignored because it would be written by multiple packages.
    "},{"location":"internals/#special-handling-for-noarch-python-packages","title":"Special Handling for noarch: python Packages","text":"

    For packages marked as noarch: python, special transformations are applied to ensure compatibility across different platforms:

    • Stripping Python Library Prefix: The \"lib/pythonX.X\" prefix is removed, retaining only the \"site-packages\" part of the path.
    • Skipping __pycache__ Directories and .pyc Files: These are excluded and recreated during installation (they are specific to the Python version).
    • Replacing bin and Scripts Directories:
    • On Unix systems, the bin directory is replaced with python-scripts.
    • On Windows systems, the Scripts directory is replaced with python-scripts.
    • Removing explicitly mentioned entry points: for noarch: python packages, entry points registered in the package are also taken into account. Files in the bin or Scripts directories that match entry points are excluded to avoid duplication.
    "},{"location":"internals/#symlink-handling","title":"Symlink Handling","text":"

    Symlinks are carefully managed to ensure they are relative rather than absolute, which aids in making the package relocatable:

    • Absolute symlinks pointing within the $PREFIX are converted to relative symlinks.
    • On Unix systems, this conversion is handled directly by creating new relative symlinks.
    • On Windows, a warning is issued since symlink creation requires administrator privileges.
    "},{"location":"package_spec/","title":"Package specification","text":"

    rattler-build produces \"conda\" packages. These packages work with the mamba and conda package managers, and they work cross-platform on Windows, Linux and macOS.

    By default, a conda package is a tar.bz2 archive which contains:

    • Metadata under the info/ directory
    • A collection of files that are installed directly into an install prefix

    The format is identical across platforms and operating systems. During the install process, all files are extracted into the install prefix, except the ones in info/. Installing a conda package into an environment is similar to executing the following commands:

    cd <environment prefix>\ntar xjf mypkg-1.0.0-h2134.tar.bz2\n

    Only files, including symbolic links, are part of a conda package. Directories are not included: they are created and removed as needed during installation, and you cannot ship an empty directory in the tar archive directly.

    There is also a newer archive type, suffixed with .conda. This archive type consists of an outer \"zip\" archive that is not compressed, and two inner archives that are compressed with zstd, which is very fast for decompression.

    The inner archives are split into info and pkg files, which makes it possible to extract only the info part of the archive (only the metadata), which is often smaller in size.

    "},{"location":"package_spec/#package-filename","title":"Package filename","text":"

    A conda package conforms to the following filename:

    <name>-<version>-<hash>.tar.bz2 OR <name>-<version>-<hash>.conda\n
    "},{"location":"package_spec/#special-files-in-packages","title":"Special files in packages","text":"

    There are some special files in a package:

    • activation and deactivation scripts that are executed when the environment is activated or deactivated
    • post-link and pre-unlink scripts that are executed when the package is installed or uninstalled

    You can read more about these files in the activation scripts and other special files section.

    "},{"location":"package_spec/#package-metadata","title":"Package metadata","text":"

    The info/ directory contains all metadata about a package. Files in this location are not installed under the install prefix. Although you are free to add any file to this directory, conda only inspects the content of the files discussed below:

    "},{"location":"package_spec/#infoindexjson","title":"info/index.json","text":"

    This file contains basic information about the package, such as name, version, build string, and dependencies. The content of this file is stored in repodata.json, which is the repository index file, hence the name index.json. The JSON object is a dictionary containing the keys shown below.

    name: string

    The lowercase name of the package. May contain lowercase characters, underscores, and dashes.

    version: string

    The package version. May not contain \"-\". Follows PEP 440.

    build: string

    The build string. May not contain \"-\". Differentiates builds of packages with otherwise identical names and versions, such as:

    • A build with other dependencies, such as Python 3.4 instead of Python 2.7.
    • A bug fix in the build process.
    • Some different optional dependencies, such as MKL versus ATLAS linkage.

    Nothing in conda actually inspects the build string. Strings such as np18py34_1 are designed only for human readability and conda never parses them.
    build_number: integer

    A non-negative integer representing the build number of the package. Unlike the build string, the build_number is inspected by conda. Conda uses it to sort packages that have otherwise identical names and versions to determine the latest one. This is important because new builds that contain bug fixes for the way a package is built may be added to a repository.

    depends: list of match specs

    A list of dependency specifications, where each element is a string. These come from the run section of the recipe or any run exports of dependencies.

    constrains: list of match specs

    A list of optional dependency constraints. The packages listed under constrains are not installed by default, but if they are installed they have to respect the constraints.

    subdir: string

    The subdir (like linux-64) of this package.

    arch: string

    Optional. The architecture the package is built for, for example x86_64. This key is generally not used (it duplicates information from subdir).

    platform: string

    Optional. The OS that the package is built for, e.g. osx. This key is generally not used (it duplicates information from subdir).
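    As an illustration, a hedged sketch of an info/index.json for the xtensor example from earlier in this document (the build string and subdir are made up for illustration):

    {\n    \"name\": \"xtensor\",\n    \"version\": \"0.24.6\",\n    \"build\": \"h60d57d3_0\",\n    \"build_number\": 0,\n    \"depends\": [\n        \"xtl >=0.7,<0.8\"\n    ],\n    \"constrains\": [\n        \"xsimd >=8.0.3,<10\"\n    ],\n    \"subdir\": \"linux-64\"\n}\n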

    "},{"location":"package_spec/#infopathsjson","title":"info/paths.json","text":"

    The paths.json file lists all files that are installed into the environment.

    It consists of a list of path entries, each with the following keys:

    _path: string

    The relative path of the file

    path_type: optional, string

    The type of linking; can be hardlink, softlink, or directory. The default is hardlink.

    file_mode: optional, string

    The file mode can be binary or text. This is only relevant for prefix replacement.

    prefix_placeholder: optional, string

    The prefix placeholder string that is encoded in the text or binary file, which is replaced at installation time. Note that this prefix placeholder uses / even on Windows.

    no_link: bool, optional

    Determines whether this file should be linked or not when installing the package (linking the file from the cache into the environment). Defaults to false.

    sha256: string

    The SHA256 hash of the file. For symbolic links it contains the SHA256 hash of the file pointed to.

    size_in_bytes: number

    The size, in bytes, of the file. For symbolic links, it contains the file size of the file pointed to.

    Due to the way the binary replacement works, the placeholder prefix must be longer than the install prefix.
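    For illustration, a hedged sketch of a single path entry using the keys described above (the path, placeholder, hash, and size are all made up):

    {\n    \"_path\": \"etc/mypackage/config.txt\",\n    \"path_type\": \"hardlink\",\n    \"file_mode\": \"text\",\n    \"prefix_placeholder\": \"/<very long placeholder prefix>\",\n    \"no_link\": false,\n    \"sha256\": \"<sha256 of the file>\",\n    \"size_in_bytes\": 1024\n}\n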

    "},{"location":"package_spec/#infolicense","title":"info/license/<...>","text":"

    All licenses mentioned in the recipe are copied to this folder.

    "},{"location":"package_spec/#infoaboutjson","title":"info/about.json","text":"

    Optional file. Contains the entries of the \"about\" section of the recipe.yaml file. The following keys are added to info/about.json if present in the build recipe:

    Renamed fields

    The new recipe spec renamed a few fields (from conda-build's original implementation). This means that some fields in the about.json file still have the old names (for backwards compatibility), while you would generally use different names in the recipe.

    home: url (from about.homepage)

    The URL of the homepage of the package.

    dev_url: url (from about.repository)

    The URL of the development repository of the package.

    doc_url: url (from about.documentation)

    The URL of the documentation of the package.

    license_url: url

    The URL of the license of the package.

    license: string (from about.license)

    The SPDX license identifier of the package.

    summary: string

    A short summary of the package.

    description: string

    A longer description of the package.

    license_family: string

    (this field is not used anymore as we rely on SPDX license identifiers)
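    For example, the about section of the xtensor recipe shown earlier would render to roughly the following info/about.json (illustrative, not byte-exact):

    {\n    \"home\": \"https://github.com/xtensor-stack/xtensor\",\n    \"dev_url\": \"https://github.com/xtensor-stack/xtensor\",\n    \"doc_url\": \"https://xtensor.readthedocs.io\",\n    \"license\": \"BSD-3-Clause\",\n    \"summary\": \"The C++ tensor algebra library\",\n    \"description\": \"Multi dimensional arrays with broadcasting and lazy computing\"\n}\n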

    "},{"location":"package_spec/#inforecipe","title":"info/recipe/<...>","text":"

    A directory containing the full contents of the build recipe. This folder also contains a rendered version of the recipe (rendered_recipe.yaml). This rendered version is used for the rebuild command. However, note that currently this format is still in flux and can change at any time.

    You can also use --no-include-recipe to disable the inclusion of the recipe in the package.

    "},{"location":"rebuild/","title":"Rebuilding a package","text":"

    The rebuild command allows you to rebuild a package from an existing package. The main use case is to examine if a package can be rebuilt in a reproducible manner. You can read more about reproducible builds here.

    "},{"location":"rebuild/#usage","title":"Usage","text":"
    rattler-build rebuild ./mypkg-0.1.0-h60d57d3_0.tar.bz2\n
    "},{"location":"rebuild/#how-it-works","title":"How it works","text":"

    The recipe is \"rendered\" and stored into the package. The way the recipe is rendered is subject to change. For the moment, the rendered recipe is stored as info/recipe/rendered_recipe.yaml. It includes the exact package versions that were used at build time. When rebuilding, we use the package resolutions from the rendered recipe, and execute the same build script as the original package.

    We also take great care to sort files in a deterministic manner as well as erasing any time stamps. The SOURCE_DATE_EPOCH environment variable is set to the same timestamp as the original build for additional determinism (some build tools use this variable to set timestamps).

    "},{"location":"rebuild/#how-to-check-the-reproducibility-of-a-package","title":"How to check the reproducibility of a package","text":"

    There is an excellent tool called diffoscope that allows you to compare two packages and see the differences. You can install it with pixi:

    pixi global install diffoscope\n

    To compare two packages, you can use the following command:

    rattler-build rebuild ./build0.tar.bz2\ndiffoscope ./build0.tar.bz2 ./mypkg-0.1.0-h60d57d3_0.tar.bz2\n
    "},{"location":"recipe_file/","title":"The recipe spec","text":"

    rattler-build implements a new recipe spec, different from the traditional \"meta.yaml\" file used in conda-build. A recipe has to be stored as a recipe.yaml file.

    "},{"location":"recipe_file/#history","title":"History","text":"

    A discussion was started on what a new recipe spec could or should look like. The fragments of this discussion can be found here.

The reasons for a new spec are:

• make it easier to parse (i.e. \"pure YAML\"); conda-build uses a mix of comments and Jinja to achieve a great deal of flexibility, but the result is hard to parse programmatically
    • iron out some inconsistencies around multiple outputs (build vs. build/script and more)
    • remove any need for recursive parsing & solving
    • finally, the initial implementation in boa relied on conda-build; rattler-build removes any dependency on Python or conda-build and reimplements everything in Rust
    "},{"location":"recipe_file/#major-differences-from-conda-build","title":"Major differences from conda-build","text":"
    • recipe filename is recipe.yaml, not meta.yaml
• outputs have less complicated behavior; keys are the same as in the top-level recipe (e.g. build/script, not just script, and package/name, not just name)
    • no implicit meta-packages in outputs
• no full Jinja2 support: no conditionals or {% set ... %} support, only string interpolation; variables can be set in the top-level \"context\" section, which is valid YAML
    • Jinja string interpolation needs to be preceded by a dollar sign at the beginning of a string, e.g. - ${{ version }} in order for it to be valid YAML
    • selectors use a YAML dictionary style (vs. comments in conda-build). Instead of - somepkg #[osx] we use:
      if: osx\nthen:\n  - somepkg\n
    • skip instruction uses a list of skip conditions and not the selector syntax from conda-build (e.g. skip: [\"osx\", \"win and py37\"])
    "},{"location":"recipe_file/#spec","title":"Spec","text":"

    The recipe spec has the following parts:

    • context: to set up variables that can later be used in Jinja string interpolation
    • package: defines name, version etc. of the top-level package
    • source: points to the sources that need to be downloaded in order to build the recipe
    • build: defines how to build the recipe and what build number to use
    • requirements: defines requirements of the top-level package
    • test: defines tests for the top-level package
    • outputs: a recipe can have multiple outputs. Each output can and should have a package, requirements and test section
    "},{"location":"recipe_file/#spec-reference","title":"Spec reference","text":"

    The spec is also made available through a JSON Schema (which is used for validation). The schema (and pydantic source file) can be found in this repository: recipe-format

To use the schema with VSCode (via the YAML plugin) and other IDEs:

    Either start the document with the following line:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n
    Or, using yaml.schemas,
    yaml.schemas: {\n  \"https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\": \"**/recipe.yaml\",\n}\n
    Read more about this here.

    See more in the automatic linting chapter.

    "},{"location":"recipe_file/#examples","title":"Examples","text":"recipe.yaml
    # this sets up \"context variables\" (in this case name and version) that\n# can later be used in Jinja expressions\ncontext:\n  version: 1.1.0\n  name: imagesize\n\n# top level package information (name and version)\npackage:\n  name: ${{ name }}\n  version: ${{ version }}\n\n# location to get the source from\nsource:\n  url: https://pypi.io/packages/source/${{ name[0] }}/${{ name }}/${{ name }}-${{ version }}.tar.gz\n  sha256: f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5\n\n# build number (should be incremented if a new build is made, but version is not incrementing)\nbuild:\n  number: 1\n  script: python -m pip install --no-deps --ignore-installed .\n\n# the requirements at build and runtime\nrequirements:\n  host:\n    - python\n    - pip\n  run:\n    - python\n\n# tests to validate that the package works as expected\ntests:\n  - python:\n      imports:\n        - imagesize\n\n# information about the package\nabout:\n  homepage: https://github.com/shibukawa/imagesize_py\n  license: MIT\n  summary: 'Getting image size from png/jpeg/jpeg2000/gif file'\n  description: |\n    This module analyzes jpeg/jpeg2000/png/gif image header and\n    return image size.\n  repository: https://github.com/shibukawa/imagesize_py\n  documentation: https://pypi.python.org/pypi/imagesize\n\n# the below is conda-forge specific!\nextra:\n  recipe-maintainers:\n    - somemaintainer\n
    "},{"location":"recipe_file/#package-section","title":"Package section","text":"

    Specifies package information.

    package:\n  name: bsdiff4\n  version: \"2.1.4\"\n
    • name: The lower case name of the package. It may contain \"-\", but no spaces.
• version: The version number of the package. Use the PEP-386 verlib conventions. Cannot contain \"-\". YAML interprets version numbers such as 1.0 as floats, meaning that 0.10 would be the same as 0.1. To avoid this, put the version number in quotes so that it is interpreted as a string (see the example below).
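For example, the quotes keep 0.10 from collapsing to the float 0.1 (a minimal sketch with a hypothetical package name):

package:\n  name: mypkg\n  version: \"0.10\" # quoted: stays \"0.10\"; unquoted, YAML would read 0.1\n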
    "},{"location":"recipe_file/#source-section","title":"Source section","text":"

    Specifies where the source code of the package is coming from. The source may come from a tarball file, git, hg, or svn. It may be a local path and it may contain patches.

    "},{"location":"recipe_file/#source-from-tarball-or-zip-archive","title":"Source from tarball or zip archive","text":"
    source:\n  url: https://pypi.python.org/packages/source/b/bsdiff4/bsdiff4-1.1.4.tar.gz\n  md5: 29f6089290505fc1a852e176bd276c43\n  sha1: f0a2c9a30073449cfb7d171c57552f3109d93894\n  sha256: 5a022ff4c1d1de87232b1c70bde50afbb98212fd246be4a867d8737173cf1f8f\n

    If an extracted archive contains only 1 folder at its top level, its contents will be moved 1 level up, so that the extracted package contents sit in the root of the work folder.

    "},{"location":"recipe_file/#source-from-git","title":"Source from git","text":"
    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  # branch: master # note: defaults to fetching the repo's default branch\n

    You can use rev to pin the commit version directly:

    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  rev: \"50a1f7ed6c168eb0815d424cba2df62790f168f0\"\n

    Or you can use the tag:

    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  tag: \"1.1.4\"\n

    git can also be a relative path to the recipe directory:

    source:\n  git: ../../bsdiff4/.git\n  tag: \"1.1.4\"\n

Furthermore, if you want to fetch just the current \"HEAD\" (this may result in non-deterministic builds), you can use depth.

    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  depth: 1 # note: the behaviour defaults to -1\n

Note: a tag or rev may not be reachable within the configured commit depth, hence combining rev or tag with depth is not allowed unless depth is set to -1.

source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  tag: \"1.1.4\"\n  depth: 1 # error: use of `depth` with `tag` is invalid, they are mutually exclusive\n

    When you want to use git-lfs, you need to set lfs: true. This will also pull the lfs files from the repository.

    source:\n  git: ../../bsdiff4/.git\n  tag: \"1.1.4\"\n  lfs: true # note: defaults to false\n
    "},{"location":"recipe_file/#source-from-a-local-path","title":"Source from a local path","text":"

    If the path is relative, it is taken relative to the recipe directory. The source is copied to the work directory before building.

      source:\n    path: ../src\n    use_gitignore: false # note: defaults to true\n

    By default, all files in the local path that are ignored by git are also ignored by rattler-build. You can disable this behavior by setting use_gitignore to false.

    "},{"location":"recipe_file/#patches","title":"Patches","text":"

    Patches may optionally be applied to the source.

      source:\n    #[source information here]\n    patches:\n      - my.patch # the patch file is expected to be found in the recipe\n
    "},{"location":"recipe_file/#destination-path","title":"Destination path","text":"

    Within rattler-build's work directory, you may specify a particular folder to place the source into. rattler-build will always drop you into the same folder ([build folder]/work), but it's up to you whether you want your source extracted into that folder, or nested deeper. This feature is particularly useful when dealing with multiple sources, but can apply to recipes with single sources as well.

    source:\n  #[source information here]\n  target_directory: my-destination/folder\n
    "},{"location":"recipe_file/#source-from-multiple-sources","title":"Source from multiple sources","text":"

    Some software is most easily built by aggregating several pieces.

    The syntax is a list of source dictionaries. Each member of this list follows the same rules as the single source. All features for each member are supported.

    Example:

    source:\n  - url: https://package1.com/a.tar.bz2\n    target_directory: stuff\n  - url: https://package1.com/b.tar.bz2\n    target_directory: stuff\n  - git: https://github.com/mamba-org/boa\n    target_directory: boa\n

    Here, the two URL tarballs will go into one folder, and the git repo is checked out into its own space. git will not clone into a non-empty folder.

    "},{"location":"recipe_file/#build-section","title":"Build section","text":"

    Specifies build information.

Each field that expects a path can also handle a glob pattern. The matching is performed from the top of the build environment, so to match files inside your project you can use a pattern like \"**/myproject/**/*.txt\", which matches any .txt file found in your project. Quotation marks (\"\") are required for patterns that start with a *.

    Recursive globbing using ** is also supported.
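As a sketch, such a pattern could be used like this (always_include_files is an assumption here; any build field that accepts paths works the same way):

build:\n  # force-include any .txt file inside the (hypothetical) myproject directory\n  always_include_files:\n    - \"**/myproject/**/*.txt\"\n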

    "},{"location":"recipe_file/#build-number-and-string","title":"Build number and string","text":"

The build number should be incremented for new builds of the same version. The number defaults to 0. The build string cannot contain \"-\". If not set explicitly, the string defaults to the computed rattler-build build string (based on the variant hash) plus the build number.

    build:\n  number: 1\n  string: abc\n
    "},{"location":"recipe_file/#dynamic-linking","title":"Dynamic linking","text":"

    This section contains settings for the shared libraries and executables.

    build:\n  dynamic_linking:\n    rpath_allowlist: [\"/usr/lib/**\"]\n
    "},{"location":"recipe_file/#python-entry-points","title":"Python entry points","text":"

    The following example creates a Python entry point named \"bsdiff4\" that calls bsdiff4.cli.main_bsdiff4().

    build:\n  python:\n    entry_points:\n      - bsdiff4 = bsdiff4.cli:main_bsdiff4\n      - bspatch4 = bsdiff4.cli:main_bspatch4\n
    "},{"location":"recipe_file/#script","title":"Script","text":"

    By default, rattler-build uses a build.sh file on Unix (macOS and Linux) and a build.bat file on Windows, if they exist in the same folder as the recipe.yaml file. With the script parameter you can either supply a different filename or write out short build scripts. You may need to use selectors to use different scripts for different platforms.

    build:\n  # A very simple build script\n  script: pip install .\n\n  # The build script can also be a list\n  script:\n    - pip install .\n    - echo \"hello world\"\n    - if: unix\n      then:\n        - echo \"unix\"\n
    "},{"location":"recipe_file/#skipping-builds","title":"Skipping builds","text":"

    Lists conditions under which rattler-build should skip the build of this recipe. Particularly useful for defining recipes that are platform-specific. By default, a build is never skipped.

    build:\n  skip:\n    - win\n    ...\n
    "},{"location":"recipe_file/#architecture-independent-packages","title":"Architecture-independent packages","text":"

    Allows you to specify \"no architecture\" when building a package, thus making it compatible with all platforms and architectures. Architecture-independent packages can be installed on any platform.

Setting the noarch key to generic tells conda not to attempt any manipulation of the package contents.

    build:\n  noarch: generic\n

    noarch: generic is most useful for packages such as static JavaScript assets and source archives. For pure Python packages that can run on any Python version, you can use the noarch: python value instead:

    build:\n  noarch: python\n

    Note

At the time of this writing, noarch packages should not make use of preprocessing selectors: noarch packages are built with the directives that evaluate to true on the platform they are built on, which will likely result in an incorrect or incomplete installation on other platforms.

    "},{"location":"recipe_file/#include-build-recipe","title":"Include build recipe","text":"

The recipe and the rendered recipe.yaml file are included in the package metadata by default. You can disable this by passing --no-include-recipe on the command line.
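For example:

rattler-build build --recipe ./recipe.yaml --no-include-recipe\n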

    Note

    There are many more options in the build section. These additional options control how variants are computed, prefix replacements, and more. See the full build options for more information.

    "},{"location":"recipe_file/#requirements-section","title":"Requirements section","text":"

    Specifies the build and runtime requirements. Dependencies of these requirements are included automatically.

    Versions for requirements must follow the conda/mamba match specification. See build-version-spec.
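For illustration, a few hypothetical dependencies using common match spec forms:

requirements:\n  run:\n    - python >=3.8,<3.12  # version range\n    - numpy 1.21.*        # version glob\n    - libblas * *openblas # name, version, and build string matcher\n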

    "},{"location":"recipe_file/#build","title":"Build","text":"

    Tools required to build the package.

These packages are run on the build system and include version control systems (git, svn), make tools (GNU make, Autotools, CMake), compilers (real cross, pseudo-cross, or native when not cross-compiling), and any source pre-processors.

    Packages which provide \"sysroot\" files, like the CDT packages (see below), also belong in the build section.

    requirements:\n  build:\n    - git\n    - cmake\n
    "},{"location":"recipe_file/#host","title":"Host","text":"

    Represents packages that need to be specific to the target platform when the target platform is not necessarily the same as the native build platform. For example, in order for a recipe to be \"cross-capable\", shared libraries requirements must be listed in the host section, rather than the build section, so that the shared libraries that get linked are ones for the target platform, rather than the native build platform. You should also include the base interpreter for packages that need one. In other words, a Python package would list python here and an R package would list mro-base or r-base.

    requirements:\n  build:\n    - ${{ compiler('c') }}\n    - if: linux\n      then:\n        - ${{ cdt('xorg-x11-proto-devel') }}\n  host:\n    - python\n

    Note

    When both \"build\" and \"host\" sections are defined, the build section can be thought of as \"build tools\" - things that run on the native platform, but output results for the target platform (e.g. a cross-compiler that runs on linux-64, but targets linux-armv7).

    The PREFIX environment variable points to the host prefix. With respect to activation during builds, both the host and build environments are activated. The build prefix is activated before the host prefix so that the host prefix has priority over the build prefix. Executables that don't exist in the host prefix should be found in the build prefix.

The build and host prefixes are always separate when both are defined, or when ${{ compiler() }} Jinja2 functions are used. The only time that build and host are merged is when the host section is absent and no ${{ compiler() }} Jinja2 functions are used in the recipe.
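For example, inside a build script (a sketch; the BUILD_PREFIX variable alongside PREFIX is an assumption carried over from the conda-build convention):

# snippet of a build.sh\necho \"host prefix:  $PREFIX\"        # libraries to link against end up here\necho \"build prefix: $BUILD_PREFIX\"  # build tools (compilers, cmake, ...) live here\n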

    "},{"location":"recipe_file/#run","title":"Run","text":"

    Packages required to run the package.

    These are the dependencies that are installed automatically whenever the package is installed. Package names should follow the package match specifications.

    requirements:\n  run:\n    - python\n    - six >=1.8.0\n

To build a recipe against different versions of NumPy and ensure that each version is part of the package dependencies, list numpy as a requirement in recipe.yaml and use a variant configuration file with multiple NumPy versions.

    "},{"location":"recipe_file/#run-constraints","title":"Run constraints","text":"

    Packages that are optional at runtime but must obey the supplied additional constraint if they are installed.

    Package names should follow the package match specifications.

    requirements:\n  run_constraints:\n    - optional-subpackage ==${{ version }}\n

    For example, let's say we have an environment that has package \"a\" installed at version 1.0. If we install package \"b\" that has a run_constraints entry of \"a >1.0\", then mamba would need to upgrade \"a\" in the environment in order to install \"b\".

    This is especially useful in the context of virtual packages, where the run_constraints dependency is not a package that mamba manages, but rather a virtual package that represents a system property that mamba can't change. For example, a package on Linux may impose a run_constraints dependency on __glibc >=2.12. This is the version bound consistent with CentOS 6. Software built against glibc 2.12 will be compatible with CentOS 6. This run_constraints dependency helps mamba, conda or pixi tell the user that a given package can't be installed if their system glibc version is too old.

    "},{"location":"recipe_file/#run-exports","title":"Run exports","text":"

    Packages may have runtime requirements such as shared libraries (e.g. zlib), which are required for linking at build time, and for resolving the link at run time. Such packages use run_exports for defining the runtime requirements to let the dependent packages understand the runtime requirements of the package.

    Example from zlib:

      requirements:\n    run_exports:\n      - ${{ pin_subpackage('libzlib', exact=True) }}\n

    Run exports are weak by default. But you can also define strong run_exports.

      requirements:\n    run_exports:\n      strong:\n        - ${{ pin_subpackage('libzlib', exact=True) }}\n
    "},{"location":"recipe_file/#ignore-run-exports","title":"Ignore run exports","text":"

There may be cases where an upstream package has a problematic run_exports constraint. You can ignore it in your recipe by listing the upstream package name in the ignore_run_exports section in requirements.

    You can ignore them by package name, or by naming the runtime dependency directly.

      requirements:\n    ignore_run_exports:\n      from_package:\n        - zlib\n

Using a runtime dependency name:

      requirements:\n    ignore_run_exports:\n      from_name:\n        - libzlib\n

    Note

    ignore_run_exports only applies to runtime dependencies coming from an upstream package.

    "},{"location":"recipe_file/#tests-section","title":"Tests section","text":"

    rattler-build supports four different types of tests. The \"script test\" installs the package and runs a list of commands. The \"Python test\" attempts to import a list of Python modules and runs pip check. The \"downstream test\" runs the tests of a downstream package that reverse depends on the package being built. And lastly, the \"package content test\" checks if the built package contains the mentioned items.

    The tests section is a list of these items:

    tests:\n  - script:\n      - echo \"hello world\"\n    requirements:\n      run:\n        - pytest\n    files:\n      source:\n        - test-data.txt\n\n  - python:\n      imports:\n        - bsdiff4\n      pip_check: true  # this is the default\n  - downstream: numpy\n
    "},{"location":"recipe_file/#script-test","title":"Script test","text":"

    The script test has 3 top-level keys: script, files and requirements. Only the script key is required.

    "},{"location":"recipe_file/#test-commands","title":"Test commands","text":"

    Commands that are run as part of the test.

    tests:\n  - script:\n      - echo \"hello world\"\n      - bsdiff4 -h\n      - bspatch4 -h\n
    "},{"location":"recipe_file/#extra-test-files","title":"Extra test files","text":"

    Test files that are copied from the source work directory into the temporary test directory and are needed during testing (note that the source work directory is otherwise not available at all during testing).

    You can also include files that come from the recipe folder. They are copied into the test directory as well.

    At test execution time, the test directory is the current working directory.

    tests:\n  - script:\n      - ls\n    files:\n      source:\n        - myfile.txt\n        - tests/\n        - some/directory/pattern*.sh\n      recipe:\n        - extra-file.txt\n
    "},{"location":"recipe_file/#test-requirements","title":"Test requirements","text":"

    In addition to the runtime requirements, you can specify requirements needed during testing. The runtime requirements that you specified in the \"run\" section described above are automatically included during testing (because the built package is installed as it regularly would be).

    In the build section you can specify additional requirements that are only needed on the build system for cross-compilation (e.g. emulators or compilers).

    tests:\n  - script:\n      - echo \"hello world\"\n    requirements:\n      build:\n        - myemulator\n      run:\n        - nose\n
    "},{"location":"recipe_file/#python-tests","title":"Python tests","text":"

    For this test type you can list a set of Python modules that need to be importable. The test will fail if any of the modules cannot be imported.

    The test will also automatically run pip check to check for any broken dependencies. This can be disabled by setting pip_check: false in the YAML.

    tests:\n  - python:\n      imports:\n        - bsdiff4\n        - bspatch4\n      pip_check: true  # can be left out because this is the default\n

    Internally this will write a small Python script that imports the modules:

    import bsdiff4\nimport bspatch4\n
    "},{"location":"recipe_file/#check-for-package-contents","title":"Check for package contents","text":"

    Checks if the built package contains the mentioned items. These checks are executed directly at the end of the build process to make sure that all expected files are present in the package.

    tests:\n  - package_contents:\n      # checks for the existence of files inside $PREFIX or %PREFIX%\n      # or, checks that there is at least one file matching the specified `glob`\n      # pattern inside the prefix\n      files:\n        - etc/libmamba/test.txt\n        - etc/libmamba\n        - etc/libmamba/*.mamba.txt\n\n      # checks for the existence of `mamba/api/__init__.py` inside of the\n      # Python site-packages directory (note: also see Python import checks)\n      site_packages:\n        - mamba.api\n\n\n      # looks in $PREFIX/bin/mamba for unix and %PREFIX%\\Library\\bin\\mamba.exe on Windows\n      # note: also check the `commands` and execute something like `mamba --help` to make\n      # sure things work fine\n      bin:\n        - mamba\n\n      # searches for `$PREFIX/lib/libmamba.so` or `$PREFIX/lib/libmamba.dylib` on Linux or macOS,\n      # on Windows for %PREFIX%\\Library\\lib\\mamba.dll & %PREFIX%\\Library\\bin\\mamba.bin\n      lib:\n        - mamba\n\n      # searches for `$PREFIX/include/libmamba/mamba.hpp` on unix, and\n      # on Windows for `%PREFIX%\\Library\\include\\libmamba\\mamba.hpp`\n      include:\n        - libmamba/mamba.hpp\n
    "},{"location":"recipe_file/#downstream-tests","title":"Downstream tests","text":"

    Warning

    Downstream tests are not yet implemented in rattler-build.

    A downstream test can mention a single package that has a dependency on the package being built. The test will install the package and run the tests of the downstream package with our current package as a dependency.

    Sometimes downstream packages do not resolve. In this case, the test is ignored.

    tests:\n  - downstream: numpy\n
    "},{"location":"recipe_file/#outputs-section","title":"Outputs section","text":"

    Explicitly specifies packaging steps. This section supports multiple outputs, as well as different package output types. The format is a list of mappings.

When using multiple outputs, certain top-level keys are \"forbidden\": package and requirements. Instead of package, a top-level recipe key can be defined. The recipe.name is ignored, but the recipe.version key is used as the default version for each output. Other top-level keys (e.g. the about section) are merged into each output to avoid repetition. Each output is a complete recipe and can have its own build, requirements, and test sections.

    recipe:\n  # the recipe name is ignored\n  name: some\n  version: 1.0\n\noutputs:\n  - package:\n      # version is taken from recipe.version (1.0)\n      name: some-subpackage\n\n  - package:\n      name: some-other-subpackage\n      version: 2.0\n

Each output acts like an independent recipe and can have its own script, build_number, and so on.

    outputs:\n  - package:\n      name: subpackage-name\n    build:\n      script: install-subpackage.sh\n

Each output is built independently. Take care not to package the same files twice.

    "},{"location":"recipe_file/#subpackage-requirements","title":"Subpackage requirements","text":"

    Like a top-level recipe, a subpackage may have zero or more dependencies listed as build, host or run requirements.

    The dependencies listed as subpackage build requirements are available only during the packaging phase of that subpackage.

    outputs:\n  - package:\n      name: subpackage-name\n    requirements:\n      build:\n        - some-dep\n      run:\n        - some-dep\n

    You can also use the pin_subpackage function to pin another output from the same recipe.

    outputs:\n  - package:\n      name: libtest\n  - package:\n      name: test\n    requirements:\n      build:\n        - ${{ pin_subpackage('libtest', max_pin='x.x') }}\n

The outputs are topologically sorted by the dependency graph, which takes the pin_subpackage invocations into account. When pin_subpackage(name, exact=True) is used, a special behavior kicks in: the name package is injected as a \"variant\" and the variant matrix is expanded accordingly. For example, consider the following situation with a variant_config.yaml file that contains openssl: [1, 3]:

    outputs:\n  - package:\n      name: libtest\n    requirements:\n      host:\n        - openssl\n  - package:\n      name: test\n    requirements:\n      build:\n        - ${{ pin_subpackage('libtest', exact=True) }}\n

    Due to the variant config file, this will build two versions of libtest. We will also build two versions of test, one that depends on libtest (openssl 1) and one that depends on libtest (openssl 3).

    "},{"location":"recipe_file/#about-section","title":"About section","text":"

Specifies identifying information about the package. This information is displayed by the package server.

    about:\n  homepage: https://example.com/bsdiff4\n  license: BSD-3-Clause # (1)!\n  license_file: LICENSE\n  summary: binary diff and patch using the BSDIFF4-format\n  description: |\n    Long description of bsdiff4 ...\n  repository: https://github.com/ilanschnell/bsdiff4\n  documentation: https://docs.com\n
1. Only SPDX license specifiers are allowed (more info here: SPDX). If you need another license type, LicenseRef-<YOUR-LICENSE> can be used, e.g. license: LicenseRef-Proprietary
    "},{"location":"recipe_file/#license-file","title":"License file","text":"

    Adds a file containing the software license to the package metadata. Many licenses require the license statement to be distributed with the package. The filename is relative to the source or recipe directory. The value can be a single filename or a YAML list for multiple license files. Values can also point to directories with license information. Directory entries must end with a / suffix (this is to lessen unintentional inclusion of non-license files; all the directory's contents will be unconditionally and recursively added).

    about:\n  license_file:\n    - LICENSE\n    - vendor-licenses/\n
    "},{"location":"recipe_file/#extra-section","title":"Extra section","text":"

    A schema-free area for storing non-conda-specific metadata in standard YAML form.

Example: storing recipe maintainer information
    extra:\n  maintainers:\n   - name of maintainer\n
    "},{"location":"recipe_file/#templating-with-jinja","title":"Templating with Jinja","text":"

    rattler-build supports limited Jinja templating in the recipe.yaml file.

    You can set up Jinja variables in the context section:

    context:\n  name: \"test\"\n  version: \"5.1.2\"\n  # later keys can reference previous keys\n  # and use jinja functions to compute new values\n  major_version: ${{ version.split('.')[0] }}\n

    Later in your recipe.yaml you can use these values in string interpolation with Jinja:

    source:\n  url: https://github.com/mamba-org/${{ name }}/v${{ version }}.tar.gz\n

    Jinja has built-in support for some common string manipulations.

In rattler-build, complex Jinja is completely disallowed, as we try to produce YAML that is valid at all times. So you should not use any {% if ... %} or similar Jinja constructs that would produce invalid YAML. Furthermore, instead of plain double curly brackets, Jinja statements need to be prefixed by $, e.g. ${{ ... }}:

    package:\n  name: {{ name }}   # WRONG: invalid yaml\n  name: ${{ name }} # correct\n

    For more information, see the Jinja template documentation and the list of available environment variables env-vars.

    Jinja templates are evaluated during the build process.

    "},{"location":"recipe_file/#additional-jinja2-functionality-in-rattler-build","title":"Additional Jinja2 functionality in rattler-build","text":"

    Besides the default Jinja2 functionality, additional Jinja functions are available during the rattler-build process: pin_compatible, pin_subpackage, and compiler.

The compiler function takes c, cxx, fortran, and other values as its argument and automatically selects the right (cross-)compiler for the target platform.

requirements:\n  build:\n    - ${{ compiler('c') }}\n

    The pin_subpackage function pins another package produced by the recipe with the supplied parameters.

    Similarly, the pin_compatible function will pin a package according to the specified rules.

    "},{"location":"recipe_file/#pin-expressions","title":"Pin expressions","text":"

rattler-build supports pin expressions. A pin expression can have a min_pin, max_pin, and exact value. max_pin and min_pin are specified with a string containing only x and ., e.g. max_pin=\"x.x.x\" would pin the given package to <1.2.3 (if the package version is 1.2.2, for example).

A pin with min_pin=\"x.x\",max_pin=\"x.x\" for a package of version 1.2.2 would evaluate to >=1.2.2,<1.3.

    If exact=true, then the hash is included, and the package is pinned exactly, e.g. ==1.2.2 h1234. This is a unique package variant that cannot exist more than once, and thus is \"exactly\" pinned.
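A sketch of all three forms for a hypothetical mypkg that resolves to version 1.2.2 with build hash h1234:

requirements:\n  run_exports:\n    - ${{ pin_subpackage('mypkg', max_pin='x.x.x') }}              # mypkg <1.2.3\n    - ${{ pin_subpackage('mypkg', min_pin='x.x', max_pin='x.x') }} # mypkg >=1.2.2,<1.3\n    - ${{ pin_subpackage('mypkg', exact=True) }}                   # mypkg ==1.2.2 h1234\n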

    "},{"location":"recipe_file/#pin-subpackage","title":"Pin subpackage","text":"

Pin subpackage refers to another package from the same recipe file. It is commonly used in the requirements/run_exports section to attach a run export to the package, or with multiple outputs to refer to a previous output.

    It looks something like:

    package:\n  name: mypkg\n  version: \"1.2.3\"\n\nrequirements:\n  run_exports:\n    # this will evaluate to `mypkg <1.3`\n    - ${{ pin_subpackage(name, max_pin='x.x') }}\n
    "},{"location":"recipe_file/#pin-compatible","title":"Pin compatible","text":"

    Pin compatible lets you pin a package based on the version retrieved from the variant file (if the pinning from the variant file needs customization).

    For example, if the variant specifies a pin for numpy: 1.11, one can use pin_compatible to relax it:

requirements:\n  host:\n    # this will select numpy 1.11\n    - numpy\n  run:\n    # this will export `numpy >=1.11,<2`, instead of the stricter `1.11` pin\n    - ${{ pin_compatible('numpy', min_pin='x.x', max_pin='x') }}\n
    "},{"location":"recipe_file/#the-env-jinja-functions","title":"The env Jinja functions","text":"

    You can access the current environment variables using the env object in Jinja.

    There are three functions:

    • env.get(\"ENV_VAR\") will insert the value of \"ENV_VAR\" into the recipe.
    • env.get_default(\"ENV_VAR\", \"undefined\") will insert the value of \"ENV_VAR\" into the recipe or, if \"ENV_VAR\" is not defined, the specified default value (in this case \"undefined\")
• env.exists(\"ENV_VAR\") returns true if the env var is set to any value, and false otherwise

    This can be used for some light templating, for example:

    build:\n  string: ${{ env.get(\"GIT_BUILD_STRING\") }}_${{ PKG_HASH }}\n
    "},{"location":"recipe_file/#cmp-function","title":"cmp function","text":"

This function matches the first argument (a variant value, e.g. python) against the second argument (a version spec) and returns the resulting boolean.

    cmp(python, '>=3.4')\n

    Example: cmp usage example

    "},{"location":"recipe_file/#cdt-function","title":"cdt function","text":"

    This function helps add Core Dependency Tree packages as dependencies by converting packages as required according to hard-coded logic.

    # on x86_64 system\ncdt('package-name') # outputs: package-name-cos6-x86_64\n# on aarch64 system\ncdt('package-name') # outputs: package-name-cos6-aarch64\n

    Example: cdt usage example

    "},{"location":"recipe_file/#preprocessing-selectors","title":"Preprocessing selectors","text":"

    You can add selectors to any item, and the selector is evaluated in a preprocessing stage. If a selector evaluates to true, the item is flattened into the parent element. If a selector evaluates to false, the item is removed.

    Selectors can use if ... then ... else as follows:

    source:\n  - if: not win\n    then:\n      - url: http://path/to/unix/source\n    else:\n      - url: http://path/to/windows/source\n\n# or the equivalent with two if conditions:\n\nsource:\n  - if: unix\n    then:\n      - url: http://path/to/unix/source\n  - if: win\n    then:\n      - url: http://path/to/windows/source\n

A selector is a valid Python-like expression that is evaluated (using minijinja). You can read more about them in the \"Selectors in recipes\" chapter.

The use of the Python version selectors py27, py34, etc. is discouraged in favor of the more general comparison operators. Additional selectors in this series will not be added to rattler-build.

    Because the selector is any valid Python expression, complicated logic is possible:

    - if: unix and not win\n  then: ...\n- if: (win or linux) and not py27\n  then: ...\n

    Lists are automatically \"merged\" upwards, so it is possible to group multiple items under a single selector:

    tests:\n  - script:\n    - if: unix\n      then:\n      - test -d ${PREFIX}/include/xtensor\n      - test -f ${PREFIX}/lib/cmake/xtensor/xtensorConfigVersion.cmake\n    - if: win\n      then:\n      - if not exist %LIBRARY_PREFIX%\\include\\xtensor\\xarray.hpp (exit 1)\n      - if not exist %LIBRARY_PREFIX%\\lib\\cmake\\xtensor\\xtensorConfigVersion.cmake (exit 1)\n\n# On unix this is rendered to:\ntests:\n  - script:\n    - test -d ${PREFIX}/include/xtensor\n    - test -f ${PREFIX}/lib/cmake/xtensor/xtensorConfigVersion.cmake\n
    "},{"location":"recipe_file/#experimental-features","title":"Experimental features","text":"

    Warning

    These are experimental features of rattler-build and may change or go away completely.

    "},{"location":"recipe_file/#jinja-functions","title":"Jinja functions","text":"
    • load_from_file
    • git.* functions
    "},{"location":"selectors/","title":"Selectors in recipes","text":"

    Recipe and variant configuration files can utilize selectors to conditionally add, remove, or modify dependencies, configuration options, or even skip recipe execution based on specific conditions.

    Selectors are implemented using a simple if / then / else map, which is a valid YAML dictionary. The condition is evaluated using minijinja and follows the same syntax as a Python expression.

    During rendering, several variables are set based on the platform and variant being built. For example, the unix variable is true for macOS and Linux, while win is true for Windows. Consider the following recipe executed on Linux:

    requirements:\n  host:\n    - if: unix\n      then: unix-tool\n    - if: win\n      then: win-tool\n

    This will be evaluated as:

    requirements:\n  host:\n    - unix-tool\n

    The line containing the Windows-specific configuration is removed. Multiple items can also be selected, such as:

    host:\n  - if: linux\n    then:\n    - linux-tool-1\n    - linux-tool-2\n    - linux-tool-3\n

    For Linux, this will result in:

    host:\n  - linux-tool-1\n  - linux-tool-2\n  - linux-tool-3\n

    Other examples often found in the wild:

if: build_platform != target_platform ... # true if cross-platform build\nif: osx and arm64 ... # true for apple silicon (osx-arm64)\nif: linux and (aarch64 or ppc64le) ... # true for linux-aarch64 or linux-ppc64le\n
    "},{"location":"selectors/#available-variables","title":"Available variables","text":"

    The following variables are available during the initial rendering and afterward:

| Variable | Description |
| -------- | ----------- |
| target_platform | the configured target_platform for the build |
| build_platform | the build platform |
| linux | \"true\" if target_platform is Linux |
| osx | \"true\" if target_platform is OSX / macOS |
| win | \"true\" if target_platform is Windows |
| unix | \"true\" if target_platform is a Unix (macOS or Linux) |
| x86_64, x86, arm64, ... | the architecture (\"x86_64\" for 64 bit, \"x86\" for 32 bit, otherwise arm64, aarch64, ppc64le, etc.) |

After the initial phase, when the variant configuration is selected, the variant values are also available in selectors. For example, if the build uses python: 3.8 as a variant, we can use if: python == \"3.8\" to enable a dependency only when the Python version is 3.8.
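For example (backports.zoneinfo is a hypothetical stand-in for a dependency only needed on Python 3.8):

requirements:\n  run:\n    - if: python == \"3.8\"\n      then: backports.zoneinfo\n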

    "},{"location":"selectors/#the-cmp-function","title":"The cmp function","text":"

Inside selectors, one can use the special cmp function to test whether the selected variant version matches a given version spec. For example, if we again have a python: 3.8 variant, we could use the following tests:

    - if: cmp(python, \"3.8\")    # true\n  then: mydep\n- if: cmp(python, \">=3.8\")  # true\n  then: mydep\n- if: cmp(python, \"<3.8\")   # false\n  then: mydep\n

    This function eliminates the need to implement any Python-specific conda-build selectors (such as py3k, py38, etc.) or the py and npy integers.

    Please note that during the initial phase of rendering we do not know the variant, and thus the cmp condition always evaluates to true.

    "},{"location":"special_files/","title":"Activation scripts and other special files","text":"

A conda package can contain \"special\" files in the prefix. These files are scripts that are executed during the activation, installation, or uninstallation process.

    If possible, they should be avoided since they execute arbitrary code at installation time and slow down the installation and activation process.

    "},{"location":"special_files/#activation-scripts","title":"Activation scripts","text":"

    The activation scripts are executed when the environment containing the package is activated (e.g. when doing micromamba activate myenv or pixi run ...).

    The scripts are located in special folders:

    • etc/conda/activate.d/{script.sh/bat} - scripts in this folder are executed before the environment is activated
    • etc/conda/deactivate.d/{script.sh/bat} - scripts in this folder are executed when the environment is deactivated

    The scripts are executed in lexicographical order, so you can prefix them with numbers to control the order of execution.

    To add a script to the package, just make sure that you install the file in this folder. For example, on Linux:

    mkdir -p $PREFIX/etc/conda/activate.d\ncp activate-mypkg.sh $PREFIX/etc/conda/activate.d/10-activate-mypkg.sh\n\nmkdir -p $PREFIX/etc/conda/deactivate.d\ncp deactivate-mypkg.sh $PREFIX/etc/conda/deactivate.d/10-deactivate-mypkg.sh\n
    "},{"location":"special_files/#post-link-and-pre-unlink-scripts","title":"Post-link and pre-unlink scripts","text":"

    The post-link and pre-unlink scripts are executed when the package is installed or uninstalled. They are both heavily discouraged and currently not implemented in rattler, rattler-build, and pixi.

    To create a post-link script for your package, you need to add <package_name>-post-link.{sh/bat} to the bin/ folder of your package. The same is applicable for pre-unlink scripts, just with the name <package_name>-pre-unlink.{sh/bat}.

    For example, for mypkg, you would add mypkg-post-link.sh to the bin/ folder of your package.
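A minimal sketch of how such a (discouraged) script would be installed from a build script, mirroring the activation-script example above:

mkdir -p $PREFIX/bin\ncp mypkg-post-link.sh $PREFIX/bin/mypkg-post-link.sh\n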

    "},{"location":"testing/","title":"Testing packages","text":"

    When you are developing a package, you should write tests for it. The tests are automatically executed right after the package build has finished.

    The tests from the test section are actually packaged into your package and can also be executed straight from the existing package.

    The idea behind adding the tests into the package is that you can execute the tests independently from building the package. That is also why we are shipping a test subcommand that takes as input an existing package and executes the tests:

    rattler-build test --package-file ./xtensor-0.24.6-h60d57d3_0.tar.bz2\n

    Running the above command will extract the package and create a clean environment where the package and dependencies are installed. Then the tests are executed in this newly-created environment.

If you inspect the package contents, you will find the test files under info/tests/*.

    "},{"location":"testing/#how-tests-are-translated","title":"How tests are translated","text":"

    The tests section allows you to specify the following things:

    tests:\n  - script:\n      # commands to run to test the package. If any of the commands\n      # returns with an error code, the test is considered failed.\n      - echo \"Hello world\"\n      - pytest ./tests\n\n    # additional requirements at test time\n    requirements:\n      run:\n        - pytest\n\n    files:\n      # Extra files to be copied to the test directory from the \"work directory\"\n      source:\n        - tests/\n        - test.py\n        - *.sh\n      recipe:\n        - more_tests/*.py\n\n  # This test section tries to import the Python modules and errors if it can't\n  - python:\n      imports:\n        - mypkg\n        - mypkg.subpkg\n

    When you are writing a test for your package, additional files are created and added to your package. These files are placed under the info/tests/{index}/ folder for each test.

    For a script test:

    • All the files are copied straight into the test folder (under info/tests/{index}/)
    • The script is turned into a run_test.sh or run_test.bat file
    • The extra requirements are stored as a JSON file called test_time_dependencies.json

    For a Python import test:

• A JSON file is created that is called python_test.json and stores the imports to be tested and whether to execute pip check or not. This file is placed under info/tests/{index}/

    For a downstream test:

    • A JSON file is created that is called downstream_test.json and stores the downstream tests to be executed. This file is placed under info/tests/{index}/
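Putting this together, a package with one script test and one Python import test would carry files roughly like this (a sketch):

info/tests/0/run_test.sh\ninfo/tests/0/test_time_dependencies.json\ninfo/tests/1/python_test.json\n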
    "},{"location":"testing/#legacy-tests","title":"Legacy tests","text":"

    Legacy tests (from conda-build) are still supported for execution. These tests are stored as files under the info/test/ folder.

    The files are:

    • run_test.sh (Unix)
    • run_test.bat (Windows)
    • run_test.py (for the Python import tests)
    • test_time_dependencies.json (for additional dependencies at test time)

    Additionally, the info/test/ folder contains all the files specified in the test section as source_files and files. The tests are executed pointing to this directory as the current working directory.

    "},{"location":"tui/","title":"Terminal User Interface","text":"

    rattler-build offers a terminal user interface for building multiple packages and viewing the logs.

    To launch the TUI, run the build command with the --tui flag as shown below:

    $ rattler-build build -r recipe.yaml --tui\n

    Note

rattler-build-tui is gated behind the tui feature flag to avoid extra dependencies. Build the project with the --features tui argument to enable the TUI functionality.

    "},{"location":"tui/#key-bindings","title":"Key Bindings","text":"Key Action \u23ce Build a Build all j/k Next/previous package up/down/left/right Scroll logs e Edit recipe (via $EDITOR) c, : Open command prompt (available commands: edit) q, ctrl-c, esc, Quit"},{"location":"variants/","title":"Variant configuration","text":"

    rattler-build can automatically build multiple variants of a given package. For example, a Python package might need multiple variants per Python version (especially if it is a binary package such as numpy).

    For this use case, one can specify variant configuration files. A variant configuration file has 2 special entries and a list of packages with variants. For example:

    variants.yaml
    # special entry #1, the zip keys\nzip_keys:\n- [python, numpy]\n\n# special entry #2, the pin_run_as_build key\npin_run_as_build:\n  numpy:\n    max_pin: 'x.x'\n\n# entries per package version that users are interested in\npython:\n# Note that versions are _strings_ (not numbers)\n- \"3.8\"\n- \"3.9\"\n- \"3.10\"\n\nnumpy:\n- \"1.12\"\n- \"1.12\"\n- \"1.20\"\n

    We can pass a variant configuration file to rattler-build using a command like the following:

    rattler-build build --variant-config ./variants.yaml --recipe myrecipe.yaml\n

If we have a recipe that has a build, host, or run dependency on python, we will build multiple variants of this package, one for each configured python version (\"3.8\", \"3.9\", and \"3.10\").

    For example:

    # ...\nrequirements:\n  host:\n  - python\n

    ... will be rendered as (for the first variant):

# ...\nrequirements:\n  host:\n  - python 3.8*\n

    Note that variants are only applied if the requirement doesn't specify any constraints. If the requirement would be python >3.8,<3.10 then the variant entry would be ignored.

    "},{"location":"variants/#package-hash-from-variant","title":"Package hash from variant","text":"

    You might have wondered what the role of the build string is. The build string is (if not explicitly set) computed from the variant configuration. It serves as a mechanism to discern different build configurations that produce a package with the same name and version.

    The hash is computed by dumping all of the variant configuration values that are used by a given recipe into a JSON file, and then hashing that JSON file.

For example, in our python example, the variant values used by the recipe would be collected into a JSON document like:

    {\n    \"python\": \"3.8\"\n}\n

This JSON string is then hashed with the MD5 hash algorithm to produce the hash. For certain packages (such as Python packages) special rules exist, and the py<Major.Minor> version is prepended to the hash, so that the final hash looks something like py38h123123.
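As a rough shell sketch (the exact JSON serialization and the length of the digest prefix are internal details of rattler-build and may differ):

echo -n '{\"python\": \"3.8\"}' | md5sum\n# a short prefix of the digest becomes the h... part of the build string, e.g. py38h123123\n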

    "},{"location":"variants/#zip-keys","title":"Zip keys","text":"

    Zip keys modify how variants are combined. Usually, each variant key that has multiple entries is expanded to a build matrix. For example, if we have:

    python: [\"3.8\", \"3.9\"]\nnumpy: [\"1.12\", \"1.14\"]\n

    ...then we obtain 4 variants for a recipe that uses both numpy and python:

    - python 3.8, numpy 1.12\n- python 3.8, numpy 1.14\n- python 3.9, numpy 1.12\n- python 3.9, numpy 1.14\n

    However, if we use the zip_keys and specify:

    zip_keys: [\"python\", \"numpy\"]\npython: [\"3.8\", \"3.9\"]\nnumpy: [\"1.12\", \"1.14\"]\n

    ...then the versions are \"zipped up\" and we only get 2 variants. Note that both python and numpy need to specify the exact same number of versions to make this work.

    The resulting variants with the zip applied are:

    - python 3.8, numpy 1.12\n- python 3.9, numpy 1.14\n
    "},{"location":"variants/#pin-run-as-build","title":"Pin run as build","text":"

    The pin_run_as_build key allows the user to inject additional pins. Usually, the run_exports mechanism is used to specify constraints for runtime dependencies from build time dependencies, but pin_run_as_build offers a mechanism to override that if the package does not contain a run exports file.

    For example:

    pin_run_as_build:\n  libcurl:\n    min_pin: 'x'\n    max_pin: 'x'\n

    If we now have a recipe that uses libcurl in the host and run dependencies like:

    requirements:\n  host:\n  - libcurl\n  run:\n  - libcurl\n

    During resolution, libcurl might be evaluated to libcurl 8.0.1 h13284. Our new runtime dependency then looks like:

    requirements:\n  host:\n  - libcurl 8.0.1 h13284\n  run:\n  - libcurl >=8,<9\n
    "},{"location":"variants/#prioritizing-variants","title":"Prioritizing variants","text":"

    You might produce multiple variants for a package, but want to define a priority for a given variant. The variant with the highest priority would be the default package that is selected by the resolver.

    There are two mechanisms to make this possible: mutex packages and the down_prioritize_variant option in the recipe.

    "},{"location":"variants/#the-down_prioritize_variant-option","title":"The down_prioritize_variant option","text":"

    Note

    It is not always necessary to use the down_prioritize_variant option - only if the solver has no other way to prefer a given variant. For example, if you have a package that has multiple variants for different Python versions, the solver will automatically prefer the variant with the highest Python version.

    The down_prioritize_variant option allows you to specify a variant that should be down-prioritized. For example:

    recipe.yaml
    build:\n  variant_config:\n    use_keys:\n      # use cuda from the variant config, e.g. to build multiple CUDA variants\n      - cuda\n    # this will down-prioritize the cuda variant versus other variants of the package\n    down_prioritize_variant: ${{ 1 if cuda else 0 }}\n
    "},{"location":"variants/#mutex-packages","title":"Mutex packages","text":"

Another way to make sure the right variants are selected is \"mutex\" packages. A mutex package is a package whose variants are mutually exclusive: we use the fact that only one package of a given name can be installed at a time (the solver has to choose).

A mutex package might be useful to make sure that all packages that depend on BLAS are compiled against the same BLAS implementation. The mutex package ensures that \"openblas\" and \"mkl\" can never be installed at the same time.

    We could define a BLAS mutex package like this:

    variant_config.yaml
    blas_variant:\n  - \"openblas\"\n  - \"mkl\"\n

    And then the recipe.yaml for the mutex package could look like this:

    recipe.yaml
    package:\n  name: blas_mutex\n  version: 1.0\n\nbuild:\n  string: ${{ blas_variant }}${{ hash }}_${{ build_number }}\n  variant_config:\n    # make sure that `openblas` is preferred over `mkl`\n    down_prioritize_variant: ${{ 1 if blas_variant == \"mkl\" else 0 }}\n

This will create two packages: blas_mutex-1.0-openblas and blas_mutex-1.0-mkl. Only one of these packages can be installed at a time because they share the same name, so the solver will select exactly one of them.

The blas package in turn should have a run_exports entry for the blas_mutex package, so that any package that links against blas also depends on the correct blas_mutex package:

    recipe.yaml
package:\n  name: openblas\n  version: 1.0\n\nrequirements:\n  # any package depending on openblas should also depend on the correct blas_mutex package\n  run_exports:\n    # Add a run export on _any_ version of the blas_mutex package whose build string starts with \"openblas\"\n    - blas_mutex * openblas*\n

    Then the recipe of a package that wants to build two variants, one for openblas and one for mkl could look like this:

    recipe.yaml
    package:\n  name: fastnumerics\n  version: 1.0\n\nrequirements:\n  host:\n    # build against both openblas and mkl\n    - ${{ blas_variant }}\n  run:\n    # implicitly adds the correct blas_mutex package through run exports\n    # - blas_mutex * ${{ blas_variant }}*\n
    "},{"location":"tutorials/cpp/","title":"Packaging a C++ package","text":"

This tutorial will guide you through making a C++ package with rattler-build.

    "},{"location":"tutorials/cpp/#header-only-library","title":"Header-only library","text":"

    Here we will build a package for the header-only library xtensor. The package depends on cmake and ninja for building.

    The main \"trick\" is to instruct CMake to install the headers in the right prefix, by using the CMAKE_INSTALL_PREFIX setting. On Unix, conda packages follow the regular \"Unix\" prefix standard ($PREFIX/include and $PREFIX/lib etc.). On Windows, it also looks like a \"Unix\" prefix but it's nested in a Library folder ($PREFIX/Library/include and $PREFIX/Library/lib etc.). For this reason, there are some handy variables (%LIBRARY_PREFIX% and %LIBRARY_BIN%) that can be used in the CMake command to install the headers and libraries in the right place.

    recipe.yaml
    context:\n  version: \"0.24.6\"\n\npackage:\n  name: xtensor\n  version: ${{ version }}\n\nsource:\n  url: https://github.com/xtensor-stack/xtensor/archive/${{ version }}.tar.gz\n  sha256: f87259b51aabafdd1183947747edfff4cff75d55375334f2e81cee6dc68ef655\n\nbuild:\n  number: 0\n  script:\n    - if: win\n      then: |\n        cmake -GNinja \\\n            -D BUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% \\\n            %SRC_DIR%\n        ninja install\n      else: |\n        cmake ${CMAKE_ARGS} -GNinja -DBUILD_TESTS=OFF \\\n              -DCMAKE_INSTALL_PREFIX=$PREFIX \\\n              $SRC_DIR\n        ninja install\n\nrequirements:\n  build:\n    - ${{ compiler('cxx') }}\n    - cmake\n    - ninja\n  host:\n    - xtl >=0.7,<0.8\n  run:\n    - xtl >=0.7,<0.8\n  run_constraints:\n    - xsimd >=8.0.3,<10\n\ntests:\n  - package_contents:\n      include:\n        - xtensor/xarray.hpp\n      files:\n        - share/cmake/xtensor/xtensorConfig.cmake\n        - share/cmake/xtensor/xtensorConfigVersion.cmake\n\nabout:\n  homepage: https://github.com/xtensor-stack/xtensor\n  license: BSD-3-Clause\n  license_file: LICENSE\n  summary: The C++ tensor algebra library\n  description: Multi dimensional arrays with broadcasting and lazy computing\n  documentation: https://xtensor.readthedocs.io\n  repository: https://github.com/xtensor-stack/xtensor\n\nextra:\n  recipe-maintainers:\n    - some-maintainer\n
    "},{"location":"tutorials/cpp/#a-c-application","title":"A C++ application","text":"

    In this example we will build poppler, a C++ application to manipulate PDF files from the command line. The final package will install a few tools into the bin/ folder.

    recipe.yaml
    context:\n  version: \"24.01.0\"\n\npackage:\n  name: poppler\n  version: ${{ version }}\n\nsource:\n  url: https://poppler.freedesktop.org/poppler-${{ version }}.tar.xz\n  sha256: c7def693a7a492830f49d497a80cc6b9c85cb57b15e9be2d2d615153b79cae08\n\nbuild:\n  script: poppler-build.sh\n\nrequirements:\n  build:\n    - ${{ compiler('c') }} # (1)!\n    - ${{ compiler('cxx') }}\n    - pkg-config\n    - cmake\n    - ninja\n  host:\n    - cairo # (2)!\n    - fontconfig\n    - freetype\n    - glib\n    - libboost-headers\n    - libjpeg-turbo\n    - lcms2\n    - libiconv\n    - libpng\n    - libtiff\n    - openjpeg\n    - zlib\n\ntests:\n  - script:\n      - pdfinfo -listenc  # (3)!\n      - pdfunite --help\n      - pdftocairo --help\n
1. We use the compiler function to get the compilers for C and C++.
2. These are all the dependencies that we link against.
3. The script test simply executes some of the installed tools to check that they work. You could run more complex tests if you want.

We've defined an external build script in the recipe. rattler-build searches for this script next to the recipe, using the given file name (or the default names build.sh and build.bat).

    poppler-build.sh
    #! /bin/bash\n\nextra_cmake_args=(\n    -GNinja\n    -DCMAKE_INSTALL_LIBDIR=lib\n    -DENABLE_UNSTABLE_API_ABI_HEADERS=ON\n    -DENABLE_GPGME=OFF\n    -DENABLE_LIBCURL=OFF\n    -DENABLE_LIBOPENJPEG=openjpeg2\n    -DENABLE_QT6=OFF\n    -DENABLE_QT5=OFF\n    -DENABLE_NSS3=OFF\n)\n\nmkdir build && cd build\n\ncmake ${CMAKE_ARGS} \"${extra_cmake_args[@]}\" \\\n    -DCMAKE_PREFIX_PATH=$PREFIX \\\n    -DCMAKE_INSTALL_PREFIX=$PREFIX \\\n    -DTIFF_INCLUDE_DIR=$PREFIX/include \\\n    $SRC_DIR\n\nninja\n\n# The `install` command will take care of copying the files to the right place\nninja install\n

    When you look at the output of the rattler-build command you might see some interesting information.

Our package will have some run dependencies (even though we did not specify any). These run dependencies come from the \"run exports\" of the packages we depend on in the host section of the recipe. They are marked in the output of rattler-build with \"RE of [host: package]\".

    Basically, libcurl declares \"if you depend on me in the host section, then you should also depend on me during runtime with the following version ranges\". This is important to make linking to shared libraries work correctly.

    Run dependencies:\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u252c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n\u2502 Name                  \u2506 Spec                                         \u2502\n\u255e\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2561\n\u2502 libcurl               \u2506 >=8.5.0,<9.0a0 (RE of [host: libcurl])       \u2502\n\u2502 fontconfig            \u2506 >=2.14.2,<3.0a0 (RE of [host: fontconfig])   \u2502\n\u2502 fonts-conda-ecosystem \u2506 (RE of [host: fontconfig])                   \u2502\n\u2502 lcms2                 \u2506 >=2.16,<3.0a0 (RE of [host: lcms2])          \u2502\n\u2502 gettext               \u2506 >=0.21.1,<1.0a0 (RE of [host: gettext])      \u2502\n\u2502 freetype              \u2506 >=2.12.1,<3.0a0 (RE of [host: freetype])     \u2502\n\u2502 openjpeg              \u2506 >=2.5.0,<3.0a0 (RE of [host: openjpeg])      \u2502\n\u2502 libiconv              \u2506 >=1.17,<2.0a0 (RE of [host: libiconv])       \u2502\n\u2502 cairo                 \u2506 >=1.18.0,<2.0a0 (RE of [host: cairo])        \u2502\n\u2502 libpng                \u2506 >=1.6.42,<1.7.0a0 (RE of [host: libpng])     \u2502\n\u2502 libzlib               \u2506 >=1.2.13,<1.3.0a0 (RE of [host: zlib])       \u2502\n\u2502 libtiff               \u2506 >=4.6.0,<4.7.0a0 (RE of [host: libtiff])     \u2502\n\u2502 libjpeg-turbo         \u2506 >=3.0.0,<4.0a0 (RE of [host: libjpeg-turbo]) \u2502\n\u2502 libglib               \u2506 >=2.78.3,<3.0a0 (RE of [host: glib])         \u2502\n\u2502 libcxx                \u2506 >=16 (RE of [build: clangxx_osx-arm64])      \u2502\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2534\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n

    We can also observe some \"linking\" information in the output, for example on macOS:

    [lib/libpoppler-glib.8.26.0.dylib] links against:\n \u251c\u2500 @rpath/libgio-2.0.0.dylib\n \u251c\u2500 @rpath/libgobject-2.0.0.dylib\n \u251c\u2500 /usr/lib/libSystem.B.dylib\n \u251c\u2500 @rpath/libglib-2.0.0.dylib\n \u251c\u2500 @rpath/libpoppler.133.dylib\n \u251c\u2500 @rpath/libfreetype.6.dylib\n \u251c\u2500 @rpath/libc++.1.dylib\n \u251c\u2500 @rpath/libpoppler-glib.8.dylib\n \u2514\u2500 @rpath/libcairo.2.dylib\n

    rattler-build performs these checks to make sure that:

    1. All shared libraries that are linked against are present in the run dependencies. If you link against a library that is not explicitly mentioned in your recipe, you will get an \"overlinking\" warning.
2. You don't require any packages in host that you are not linking against. If this is the case, you will get an \"overdepending\" warning.
    "},{"location":"tutorials/python/","title":"Writing a Python package","text":"

    Writing a Python package is fairly straightforward, especially for \"Python-only\" packages. But it becomes really interesting when compiled extensions are involved (we will look at this in the second example).

    The following recipe uses the noarch: python setting to build a noarch package that can be installed on any platform without modification. This is very handy for packages that are pure Python and do not contain any compiled extensions. Additionally, noarch: python packages work with a range of Python versions (contrary to packages with compiled extensions that are tied to a specific Python version).

    recipe.yaml
    context:\n  version: \"8.1.2\"\n\npackage:\n  name: ipywidgets\n  version: ${{ version }}\n\nsource:\n  url: https://pypi.io/packages/source/i/ipywidgets/ipywidgets-${{ version }}.tar.gz\n  sha256: d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9\n\nbuild:\n  noarch: python # (1)!\n  script: pip install . -v\n\nrequirements:\n  # note that there is no build section\n  host:\n    - pip\n    - python >=3.7\n    - setuptools\n    - wheel\n  run:\n    - comm >=0.1.3\n    - ipython >=6.1.0\n    - jupyterlab_widgets >=3.0.10,<3.1.0\n    - python >=3.7\n    - traitlets >=4.3.1\n    - widgetsnbextension >=4.0.10,<4.1.0\n\ntests:\n  - python:\n      imports:\n        - ipywidgets # (2)!\n\nabout:\n  homepage: https://github.com/ipython/ipywidgets\n  license: BSD-3-Clause\n  license_file: LICENSE\n  summary: Jupyter Interactive Widgets\n  description: |\n    ipywidgets are interactive HTML widgets for Jupyter notebooks and the IPython kernel.\n  documentation: https://ipywidgets.readthedocs.io/en/latest/\n
1. The noarch: python line tells rattler-build that this package is pure Python, so a single build fits all platforms. noarch packages can be installed on any platform without modification, which is very handy.
    2. The imports section in the tests is used to check that the package is installed correctly and can be imported.
    "},{"location":"tutorials/python/#a-python-package-with-compiled-extensions","title":"A Python package with compiled extensions","text":"

We will build a package for numpy \u2013 which most definitely contains compiled code. Since compiled code is Python version-specific, we will need to specify the Python version explicitly. This is most easily done with a \"variant config\" file:

    variant_config.yaml
    python:\n  - 3.11\n  - 3.12\n

    This will replace any python found in the recipe with the versions specified in the variant_config.yaml file.

    recipe.yaml
    context:\n  version: 1.26.4\n\npackage:\n  name: numpy\n  version: ${{ version }}\n\nsource:\n  - url: https://github.com/numpy/numpy/releases/download/v${{ version }}/numpy-${{ version }}.tar.gz\n    sha256: 2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010\n\nbuild:\n  python:\n    entry_points:\n      - f2py = numpy.f2py.f2py2e:main  # [win]\n\nrequirements:\n  build:\n    - ${{ compiler('c') }}\n    - ${{ compiler('cxx') }}\n  host:\n    # note: variant is injected here!\n    - python\n    - pip\n    - meson-python\n    - ninja\n    - pkg-config\n    - python-build\n    - cython\n    - libblas\n    - libcblas\n    - liblapack\n  run:\n    - python\n  run_exports:\n    - ${{ pin_subpackage(\"numpy\") }}\n\ntests:\n  - python:\n      imports:\n        - numpy\n        - numpy.array_api\n        - numpy.array_api.linalg\n        - numpy.ctypeslib\n\n  - script:\n    - f2py -h\n\nabout:\n  homepage: http://numpy.org/\n  license: BSD-3-Clause\n  license_file: LICENSE.txt\n  summary: The fundamental package for scientific computing with Python.\n  documentation: https://numpy.org/doc/stable/\n  repository: https://github.com/numpy/numpy\n

    The build script for Unix:

    build.sh
    mkdir builddir\n\n$PYTHON -m build -w -n -x \\\n    -Cbuilddir=builddir \\\n    -Csetup-args=-Dblas=blas \\\n    -Csetup-args=-Dlapack=lapack\n\n$PYTHON -m pip install dist/numpy*.whl\n

    The build script for Windows:

    build.bat
    mkdir builddir\n\n%PYTHON% -m build -w -n -x ^\n    -Cbuilddir=builddir ^\n    -Csetup-args=-Dblas=blas ^\n    -Csetup-args=-Dlapack=lapack\nif %ERRORLEVEL% neq 0 exit 1\n\n:: `pip install dist\\numpy*.whl` does not work on windows,\n:: so use a loop; there's only one wheel in dist/ anyway\nfor /f %%f in ('dir /b /S .\\dist') do (\n    pip install %%f\n    if %ERRORLEVEL% neq 0 exit 1\n)\n
    "},{"location":"tutorials/python/#running-the-recipe","title":"Running the recipe","text":"

Running this recipe with the variant config file will build a total of two numpy packages:

    rattler-build build --recipe ./numpy \\\n  --variant-config ./numpy/variant_config.yaml\n

    At the beginning of the build process, rattler-build will print the following message to show you the variants it found:

    Found variants:\n\nnumpy-1.26.4-py311h5f8ada8_0\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u252c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n\u2502 Variant         \u2506 Version   \u2502\n\u255e\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2561\n\u2502 python          \u2506 3.11      \u2502\n\u2502 target_platform \u2506 osx-arm64 \u2502\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2534\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n\nnumpy-1.26.4-py312h440f24a_0\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u252c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n\u2502 Variant         \u2506 Version   \u2502\n\u255e\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2561\n\u2502 python          \u2506 3.12      \u2502\n\u2502 target_platform \u2506 osx-arm64 \u2502\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2534\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n
    "},{"location":"tutorials/rust/","title":"Building a Rust package","text":"

Building a Rust package is very straightforward with rattler-build. In this example, we build a package for cargo-edit, a utility for managing Cargo dependencies from the command line.

One tiny challenge is that the Rust compiler is not \"pre-configured\", so we need to add a variant_config.yaml file alongside the recipe:

    variant_config.yaml
    rust_compiler: rust\n

    This will tell rattler-build what to insert for the ${{ compiler('rust') }} Jinja function.

    Note

The ${{ compiler(...) }} functions are very useful in the context of cross-compilation. When the function is evaluated, it will insert the correct compiler (as selected with the variant config) as well as the target_platform. The \"rendered\" compiler will look like rust_linux-64 when you are targeting the linux-64 platform.

    You can read more about this in the cross-compilation section.

    Then we can write the recipe for the package like so:

    recipe.yaml
    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n\ncontext:\n  version: \"0.11.9\"\n\npackage:\n  name: cargo-edit\n  version: ${{ version }}\n\nsource:\n  url: https://github.com/killercup/cargo-edit/archive/refs/tags/v${{ version }}.tar.gz\n  sha256: 46670295e2323fc2f826750cdcfb2692fbdbea87122fe530a07c50c8dba1d3d7\n\nbuild:\n  script:\n    # we bundle all the licenses of the dependencies into a THIRDPARTY.yml file and include it in the package\n    - cargo-bundle-licenses --format yaml --output ${SRC_DIR}/THIRDPARTY.yml\n    - $BUILD_PREFIX/bin/cargo install --locked --bins --root ${PREFIX} --path .\n\nrequirements:\n  build:\n    - ${{ compiler('rust') }}\n    - cargo-bundle-licenses\n\ntests:\n  - script:\n      - cargo-upgrade --help\n\nabout:\n  homepage: https://github.com/killercup/cargo-edit\n  license: MIT\n  license_file:\n    - LICENSE\n    - THIRDPARTY.yml\n  description: \"A utility for managing cargo dependencies from the command line.\"\n  summary: \"A utility for managing cargo dependencies from the command line.\"\n

    To build this recipe, simply run:

    rattler-build build \\\n    --recipe ./cargo-edit/recipe.yaml \\\n    --variant-config ./cargo-edit/variant_config.yaml\n
    "}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":""},{"location":"#rattler-build-a-fast-conda-package-builder","title":"rattler-build: A Fast Conda Package Builder","text":"

The rattler-build tool and library create cross-platform, relocatable binaries / packages from a simple recipe format. The recipe format is heavily inspired by conda-build and boa, and the output of a regular rattler-build run is a package that can be installed using mamba, rattler or conda.

    rattler-build does not have any dependencies on conda-build or Python and works as a standalone binary.

    "},{"location":"#installation","title":"Installation","text":"

You can grab a prerelease version of rattler-build from the GitHub Releases.

    It is (of course) also available from conda-forge:

    pixi global install rattler-build\n# or with micromamba\nmicromamba install rattler-build -c conda-forge\n

    Alternatively, you can install rattler-build via Homebrew:

    brew install rattler-build\n

    rattler-build is also available on Arch Linux in the extra repository:

    pacman -S rattler-build\n
    "},{"location":"#dependencies","title":"Dependencies","text":"

Currently, rattler-build needs some dependencies on the host system, which are executed as subprocesses. We plan to reduce the number of external dependencies over time by writing what we need in Rust to make rattler-build fully self-contained.

    • tar to unpack tarballs downloaded from the internet in a variety of formats. .gz, .bz2 and .xz are widely used and one might have to install the compression packages as well (e.g. gzip, bzip2, ...)
    • patch to patch source code after downloading
    • install_name_tool is necessary on macOS to rewrite the rpath of shared libraries and executables to make it relative
    • patchelf is required on Linux to rewrite the rpath and runpath of shared libraries and executables
• git to check out Git repositories (not implemented yet, but git will be required in the future)
    • msvc on Windows because we cannot ship the MSVC compiler on conda-forge (needs to be installed on the host machine)

    On Windows, to obtain these dependencies from conda-forge, one can install m2-patch, m2-bzip2, m2-gzip, m2-tar.

    "},{"location":"#documentation","title":"Documentation","text":"

    We have extensive documentation for rattler-build. You can find the book here.

    "},{"location":"#github-action","title":"GitHub Action","text":"

    There is a GitHub Action for rattler-build. It can be used to install rattler-build in CI/CD workflows and run a build command. Please check out the GitHub Action documentation for more information.

    "},{"location":"#usage","title":"Usage","text":"

    rattler-build comes with two commands: build and test.

    The build command takes a --recipe recipe.yaml as input and produces a package as output. The test subcommand can be used to test existing packages (tests are shipped with the package).

    "},{"location":"#the-recipe-format","title":"The recipe format","text":"

    Note You can find all examples below in the examples folder in the codebase and run them with rattler-build.

    A simple example recipe for the xtensor header-only C++ library:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n\ncontext:\n  name: xtensor\n  version: 0.24.6\n\npackage:\n  name: ${{ name|lower }}\n  version: ${{ version }}\n\nsource:\n  url: https://github.com/xtensor-stack/xtensor/archive/${{ version }}.tar.gz\n  sha256: f87259b51aabafdd1183947747edfff4cff75d55375334f2e81cee6dc68ef655\n\nbuild:\n  number: 0\n  script:\n    - if: win\n      then: |\n        cmake -G \"NMake Makefiles\" -D BUILD_TESTS=OFF -D CMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% %SRC_DIR%\n        nmake\n        nmake install\n      else: |\n        cmake ${CMAKE_ARGS} -DBUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX=$PREFIX $SRC_DIR -DCMAKE_INSTALL_LIBDIR=lib\n        make install\n\nrequirements:\n  build:\n    - ${{ compiler('cxx') }}\n    - cmake\n    - if: unix\n      then: make\n  host:\n    - xtl >=0.7,<0.8\n  run:\n    - xtl >=0.7,<0.8\n  run_constraints:\n    - xsimd >=8.0.3,<10\n\ntests:\n  - script:\n    - if: unix or emscripten\n      then:\n        - test -d ${PREFIX}/include/xtensor\n        - test -f ${PREFIX}/include/xtensor/xarray.hpp\n        - test -f ${PREFIX}/share/cmake/xtensor/xtensorConfig.cmake\n        - test -f ${PREFIX}/share/cmake/xtensor/xtensorConfigVersion.cmake\n    - if: win\n      then:\n        - if not exist %LIBRARY_PREFIX%\\include\\xtensor\\xarray.hpp (exit 1)\n        - if not exist %LIBRARY_PREFIX%\\share\\cmake\\xtensor\\xtensorConfig.cmake (exit 1)\n        - if not exist %LIBRARY_PREFIX%\\share\\cmake\\xtensor\\xtensorConfigVersion.cmake (exit 1)\n\nabout:\n  homepage: https://github.com/xtensor-stack/xtensor\n  license: BSD-3-Clause\n  license_file: LICENSE\n  summary: The C++ tensor algebra library\n  description: Multi dimensional arrays with broadcasting and lazy computing\n  documentation: https://xtensor.readthedocs.io\n  repository: https://github.com/xtensor-stack/xtensor\n\nextra:\n  recipe-maintainers:\n    - some-maintainer\n

    A recipe for the rich Python package (using noarch):

    context:\n  version: \"13.4.2\"\n\npackage:\n  name: \"rich\"\n  version: ${{ version }}\n\nsource:\n  - url: https://pypi.io/packages/source/r/rich/rich-${{ version }}.tar.gz\n    sha256: d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898\n\nbuild:\n  # Thanks to `noarch: python` this package works on all platforms\n  noarch: python\n  script:\n    - python -m pip install . -vv --no-deps --no-build-isolation\n\nrequirements:\n  host:\n    - pip\n    - poetry-core >=1.0.0\n    - python 3.10\n  run:\n    # sync with normalized deps from poetry-generated setup.py\n    - markdown-it-py >=2.2.0\n    - pygments >=2.13.0,<3.0.0\n    - python 3.10\n    - typing_extensions >=4.0.0,<5.0.0\n\ntests:\n  - python:\n      imports:\n        - rich\n      pip_check: true\n\nabout:\n  homepage: https://github.com/Textualize/rich\n  license: MIT\n  license_file: LICENSE\n  summary: Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal\n  description: |\n    Rich is a Python library for rich text and beautiful formatting in the terminal.\n\n    The Rich API makes it easy to add color and style to terminal output. Rich\n    can also render pretty tables, progress bars, markdown, syntax highlighted\n    source code, tracebacks, and more \u2014 out of the box.\n  documentation: https://rich.readthedocs.io\n  repository: https://github.com/Textualize/rich\n

    A recipe for the curl library:

    context:\n  version: \"8.0.1\"\n\npackage:\n  name: curl\n  version: ${{ version }}\n\nsource:\n  url: http://curl.haxx.se/download/curl-${{ version }}.tar.bz2\n  sha256: 9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf\n\nbuild:\n  number: 0\n\nrequirements:\n  build:\n    - ${{ compiler('c') }}\n    - if: win\n      then:\n        - cmake\n        - ninja\n    - if: unix\n      then:\n        - make\n        - perl\n        - pkg-config\n        - libtool\n  host:\n    - if: linux\n      then:\n        - openssl\n\nabout:\n  homepage: http://curl.haxx.se/\n  license: MIT/X derivate (http://curl.haxx.se/docs/copyright.html)\n  license_file: COPYING\n  summary: tool and library for transferring data with URL syntax\n  description: |\n    Curl is an open source command line tool and library for transferring data\n    with URL syntax. It is used in command lines or scripts to transfer data.\n  documentation: https://curl.haxx.se/docs/\n  repository: https://github.com/curl/curl\n

    For the curl library recipe, two additional script files (build.sh and build.bat) are needed.

    build.sh

    #!/bin/bash\n\n# Get an updated config.sub and config.guess\ncp $BUILD_PREFIX/share/libtool/build-aux/config.* .\n\nif [[ $target_platform =~ linux.* ]]; then\n    USESSL=\"--with-openssl=${PREFIX}\"\nelse\n    USESSL=\"--with-secure-transport\"\nfi;\n\n./configure \\\n    --prefix=${PREFIX} \\\n    --host=${HOST} \\\n    ${USESSL} \\\n    --with-ca-bundle=${PREFIX}/ssl/cacert.pem \\\n    --disable-static --enable-shared\n\nmake -j${CPU_COUNT} ${VERBOSE_AT}\nmake install\n\n# Includes man pages and other miscellaneous.\nrm -rf \"${PREFIX}/share\"\n

    build.bat

    mkdir build\n\ncmake -GNinja ^\n      -DCMAKE_BUILD_TYPE=Release ^\n      -DBUILD_SHARED_LIBS=ON ^\n      -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^\n      -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX% ^\n      -DCURL_USE_SCHANNEL=ON ^\n      -DCURL_USE_LIBSSH2=OFF ^\n      -DUSE_ZLIB=ON ^\n      -DENABLE_UNICODE=ON ^\n      %SRC_DIR%\n\nIF %ERRORLEVEL% NEQ 0 exit 1\n\nninja install --verbose\n
    "},{"location":"authentication_and_upload/","title":"Server authentication","text":""},{"location":"authentication_and_upload/#authenticating-with-a-server","title":"Authenticating with a server","text":"

    You may want to use private channels for which you need to be authenticated. To do this ephemerally you can use the RATTLER_AUTH_FILE environment variable to point to a JSON file with the following structure:

    {\n    \"*.prefix.dev\": {\n        \"BearerToken\": \"your_token\"\n    },\n    \"otherhost.com\": {\n        \"BasicHttp\": {\n            \"username\": \"your_username\",\n            \"password\": \"your_password\"\n        }\n    },\n    \"anaconda.org\": {\n        \"CondaToken\": \"your_token\"\n    }\n}\n

    The keys are the host names. You can use wildcard specifiers here (e.g. *.prefix.dev to match all subdomains of prefix.dev, such as repo.prefix.dev). This will allow you to also obtain packages from any private channels that you have access to.
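
A sketch of ephemeral use (the file path and channel URL here are hypothetical):

export RATTLER_AUTH_FILE=$HOME/.rattler/auth.json\nrattler-build build --recipe recipe.yaml -c https://repo.prefix.dev/my-private-channel\n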

    The following known authentication methods are supported:

    • BearerToken: prefix.dev
    • CondaToken: anaconda.org, quetz
    • BasicHttp: artifactory
    "},{"location":"authentication_and_upload/#uploading-packages","title":"Uploading packages","text":"

    If you want to upload packages, then rattler-build comes with a built-in upload command. There are 4 options:

    • prefix.dev: you can create public or private channels on the prefix.dev hosted server
    • anaconda.org: you can upload packages to the free anaconda.org server
    • quetz: you can host your own quetz server and upload packages to it
    • artifactory: you can upload packages to a JFrog Artifactory server

    The command is:

    rattler-build upload <server> <package_files>\n

    Note: you can also use the RATTLER_AUTH_FILE environment variable to authenticate with the server.

    "},{"location":"authentication_and_upload/#prefixdev","title":"prefix.dev","text":"

    To upload to prefix.dev, you need to have an account and a token. You can create a token in the settings of your account. The token is used to authenticate the upload.

    export PREFIX_API_KEY=<your_token>\nrattler-build upload prefix -c <channel> <package_files>\n

    You can also use the --api-key=$PREFIX_API_KEY option to pass the token directly to the command. Note that you need to have created the channel on the prefix.dev website before you can upload to it.

    "},{"location":"authentication_and_upload/#quetz","title":"Quetz","text":"

To upload to a channel on your own Quetz server, you need to pass the server URL and an API key. The API key is used to authenticate the upload.

    export QUETZ_API_KEY=<your_token>\nrattler-build upload quetz -u <url> -c <channel> <package_files>\n
    "},{"location":"authentication_and_upload/#artifactory","title":"Artifactory","text":"

    To upload to an Artifactory server, you need to pass a username and password. The username and password are used to authenticate the upload.

    export ARTIFACTORY_USERNAME=<your_username>\nexport ARTIFACTORY_PASSWORD=<your_password>\nrattler-build upload artifactory -u <url> -c <channel> <package_files>\n
    "},{"location":"authentication_and_upload/#anacondaorg","title":"anaconda.org","text":"

    To upload to anaconda.org, you need to specify the owner and API key. The API key is used to authenticate the upload.

    The owner is the owner of the distribution, for example, your user name or organization.

    One can also specify a label such as dev for release candidates using the -c flag. The default value is main.

    You can also add the --force argument to forcibly upload a new package (and overwrite any existing ones).

    export ANACONDA_API_KEY=<your_token>\nrattler-build upload anaconda -o <your_username> -c <label> <package_files>\n
    "},{"location":"automatic_linting/","title":"Enabling Automatic Linting in VSCode","text":"

    Our new recipe format adheres to a strict JSON schema, which you can access here.

    This schema is implemented using pydantic and can be rendered into a JSON schema file. The YAML language server extension in VSCode is capable of recognizing this schema, providing useful hints during the editing process.

    To enable automatic linting with the YAML language server, you need to add the following line at the beginning of your recipe file:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n

    Alternatively, if you prefer not to add this line to your file, you can install the JSON Schema Store Catalog extension. This extension will also enable automatic linting for your recipe files.
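
If you use the Red Hat YAML extension directly, a third option is to map the schema onto recipe files in your VSCode settings; a sketch (the glob pattern is an assumption):

settings.json
{\n  \"yaml.schemas\": {\n    \"https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\": \"**/recipe.yaml\"\n  }\n}\n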

    "},{"location":"build_options/","title":"Advanced build options","text":"

    There are some specialized build options to control various features:

    • prefix replacement
    • variant configuration
    • encoded file type

    These are all found under the build key in the recipe.yaml.

    "},{"location":"build_options/#always-include-and-always-copy-files","title":"Always include and always copy files","text":"

    There are some options that control the inclusion of files in the final package.

    The always_include_files option can be used to include files even if they are already in the environment as part of some other host dependency. This is normally \"clobbering\" and should be used with caution (since packages should not have any overlapping files).

    The always_copy_files option can be used to copy files instead of linking them. This is useful for files that might be modified inside the environment (e.g. configuration files). Normally, files are linked from a central cache into the environment to save space \u2013 that means that files modified in one environment will be modified in all environments. This is not always desirable, and in that case you can use the always_copy_files option.

    How always_copy_files works

    The always_copy_files option works by setting the no_link option in the info/paths.json to true for the files in question. This means that the files are copied instead of linked when the package is installed.

    recipe.yaml
build:\n  # include files even if they are already in the environment\n  # as part of some other host dependency\n  always_include_files: list of globs\n\n  # do not soft- or hard-link these files, but always copy them (was `no_link`)\n  always_copy_files: list of globs\n

    Glob patterns

Glob patterns are used throughout the build options to specify files. The patterns are matched against the relative path of the file in the build directory. Patterns can contain * to match any number of characters, ? to match a single character, and ** to match any number of directories.

    For example:

    • *.txt matches all files ending in .txt
    • **/*.txt matches all files ending in .txt in any directory
    • **/test_*.txt matches all files starting with test_ and ending in .txt in any directory
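
As a concrete sketch (the glob here is hypothetical), a recipe that copies configuration files instead of linking them could contain:

recipe.yaml
build:\n  # configuration files may be edited in place, so copy them instead of linking\n  always_copy_files:\n    - etc/**/*.conf\n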
    "},{"location":"build_options/#merge-build-and-host-environments","title":"Merge build and host environments","text":"

    In very rare cases you might want to merge the build and host environments to obtain the \"legacy\" behavior of conda-build.

    recipe.yaml
    build:\n  # merge the build and host environments (used in many R packages on Windows)\n  merge_build_and_host_envs: bool (defaults to false)\n
    "},{"location":"build_options/#prefix-detection-replacement-options","title":"Prefix detection / replacement options","text":"

At installation time, the \"install\" prefix is injected into text and binary files. Sometimes this is not desired, and sometimes the user might want closer control over the automatic text/binary detection.

    The main difference between prefix replacement for text and binary files is that for binary files, the prefix string is padded with null bytes to match the length of the original prefix. The original prefix is the very long placeholder string that you might have seen in the build process.

    On Windows, binary prefix replacement is never performed.

    recipe.yaml
package:\n  name: mypackage\n  version: 1.0\n\nbuild:\n  # settings concerning the prefix detection in files\n  prefix_detection:\n    # force the file type of the given files to be TEXT or BINARY\n    # for prefix replacement\n    force_file_type:\n      # force TEXT file type (list of globs)\n      text: list of globs\n      # force binary file type (list of globs)\n      binary: list of globs\n\n    # ignore all or specific files for prefix replacement\n    ignore: bool | [path] (defaults to false)\n\n    # whether to detect binary files with prefix or not\n    # defaults to true on Unix and (always) false on Windows\n    ignore_binary_files: bool\n
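
For example, a hypothetical sketch that forces a generated data file to be treated as text and disables binary prefix detection:

recipe.yaml
build:\n  prefix_detection:\n    force_file_type:\n      text:\n        - share/mypackage/settings.dat\n    ignore_binary_files: true\n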
    "},{"location":"build_options/#variant-configuration","title":"Variant configuration","text":"

    To control the variant precisely you can use the \"variant configuration\" options.

    A variant package has the same version number, but different \"hash\" and potentially different dependencies or build options. Variant keys are extracted from the variant_config.yaml file and usually any used Jinja variables or dependencies without version specifier are used as variant keys.

    Variant keys can also be forcibly set or ignored with the use_keys and ignore_keys options.

    In order to decide which of the variant packages to prefer and install by default, the down_prioritize_variant option can be used. The higher the value, the less preferred the variant is.

    More about variants can be found in the variant documentation.

    The following options are available in the build section to control the variant configuration:

    recipe.yaml
    build:\n  # settings for the variant\n  variant:\n    # Keys to forcibly use for the variant computation\n    # even if they are not in the dependencies\n    use_keys: list of strings\n\n    # Keys to forcibly ignore for the variant computation\n    # even if they are in the dependencies\n    ignore_keys: list of strings\n\n    # used to prefer this variant less\n    down_prioritize_variant: integer (defaults to 0, higher is less preferred)\n
    "},{"location":"build_options/#dynamic-linking-configuration","title":"Dynamic linking configuration","text":"

    After the package is built, rattler-build performs some \"post-processing\" on the binaries and libraries.

    This entails making the shared libraries relocatable and checking that all linked libraries are present in the run requirements. The following settings control this behavior.

With the rpaths option you can forcibly set the rpath of the shared libraries. The paths are relative to the install prefix. Any rpath setting is ignored on Windows.

    The rpath_allowlist option can be used to allow the rpath to point to locations outside of the environment. This is useful if you want to link against libraries that are not part of the conda environment (e.g. proprietary software).

    If you want to stop rattler-build from relocating the binaries, you can set binary_relocation to false. If you want to only relocate some binaries, you can select the relevant ones with a glob pattern.

    To read more about rpaths and how rattler-build creates relocatable binary packages, see the internals docs.

    If you link against some libraries (possibly even outside of the prefix, in a system location), then you can use the missing_dso_allowlist to allow linking against these and suppress any warnings. This list is pre-populated with a list of known system libraries on the different operating systems.

    As part of the post-processing, rattler-build checks for overlinking and overdepending. \"Overlinking\" is when a binary links against a library that is not specified in the run requirements. This is usually a mistake because the library would not be present in the environment when the package is installed.

    Conversely, \"overdepending\" is when a library is part of the run requirements, but is not actually used by any of the binaries/libraries in the package.

    recipe.yaml
build:\n  # settings for shared libraries and executables\n  dynamic_linking:\n    # linux only, list of rpaths relative to the installation prefix\n    rpaths: list of paths (defaults to ['lib/'])\n\n    # Allow runpath / rpath to point to these locations\n    # outside of the environment\n    rpath_allowlist: list of globs\n\n    # whether to relocate binaries or not. If this is a list of paths, then\n    # only the listed paths are relocated\n    binary_relocation: bool (defaults to true) | list of globs\n\n    # Allow linking against libraries that are not in the run requirements\n    missing_dso_allowlist: list of globs\n\n    # what to do when detecting overdepending\n    overdepending_behavior: \"ignore\" or \"error\" # (defaults to \"error\")\n\n    # what to do when detecting overlinking\n    overlinking_behavior: \"ignore\" or \"error\" # (defaults to \"error\")\n
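
A concrete sketch (the allowlisted path is hypothetical) that permits linking against a system library outside the prefix while keeping relocation enabled:

recipe.yaml
build:\n  dynamic_linking:\n    missing_dso_allowlist:\n      - /usr/lib/libcuda.so*\n    binary_relocation: true\n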
    "},{"location":"build_script/","title":"Build scripts","text":"

    The build.sh file is the build script for Linux and macOS and build.bat is the build script for Windows. These scripts contain the logic that carries out your build steps. Anything that your build script copies into the $PREFIX or %PREFIX% folder will be included in your output package.

For example, this build.sh installs a script from the recipe directory into $PREFIX/bin:

    mkdir -p $PREFIX/bin\ncp $RECIPE_DIR/my_script_with_recipe.sh $PREFIX/bin/super-cool-script.sh\n

    There are many environment variables defined for you to use in build.sh and build.bat. Please see environment variables for more information.

    build.sh and build.bat are optional. You can instead use the build/script key in your recipe.yaml, with each value being either a string command or a list of string commands. Any commands you put there must be able to run on every platform for which you build. For example, you can't use the cp command because cmd.exe won't understand it on Windows.
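
As a sketch of the inline form (borrowing the pip invocation from the rich example above, since it behaves the same on all platforms):

recipe.yaml
build:\n  script:\n    - python -m pip install . -vv --no-deps --no-build-isolation\n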

    build.sh is run with bash and build.bat is run with cmd.exe.

    "},{"location":"build_script/#environment-variables","title":"Environment variables","text":""},{"location":"build_script/#environment-variables-set-during-the-build-process","title":"Environment variables set during the build process","text":"

During the build process, the following environment variables are set, on Windows with build.bat and on macOS and Linux with build.sh. By default, these are the only variables available to your build script. Unless otherwise noted, no variables are inherited from the shell environment in which you invoke rattler-build.

• ARCH: Either 32 or 64, to specify whether the build is 32-bit or 64-bit. The value depends on the ARCH environment variable and defaults to the architecture the interpreter running conda was compiled with.
• CMAKE_GENERATOR: The CMake generator string for the current build environment. On Linux systems, this is always Unix Makefiles. On Windows, it is generated according to the Visual Studio version activated at build time, for example, Visual Studio 9 2008 Win64.
• CONDA_BUILD=1: Always set to indicate that the conda-build process is running.
• CPU_COUNT: Represents the number of CPUs on the system.
• SHLIB_EXT: Denotes the shared library extension specific to the operating system (e.g. .so for Linux, .dylib for macOS, and .dll for Windows).
• HTTP_PROXY: Inherited from the user's shell environment, specifying the HTTP proxy settings.
• HTTPS_PROXY: Similar to HTTP_PROXY, this is inherited from the user's shell environment and specifies the HTTPS proxy settings.
• LANG: Inherited from the user's shell environment, defining the system language and locale settings.
• MAKEFLAGS: Inherited from the user's shell environment. This can be used to set additional arguments for the make command, such as -j2 to utilize 2 CPU cores for building the recipe.
• PY_VER: Specifies the Python version against which the build is occurring. This can be modified with a variant_config.yaml file.
• PATH: Inherited from the user's shell environment and augmented with the activated host and build prefixes.
• PREFIX: The build prefix to which the build script should install the software.
• PKG_BUILDNUM: Indicates the build number of the package currently being built.
• PKG_NAME: The name of the package that is being built.
• PKG_VERSION: The version of the package currently under construction.
• PKG_BUILD_STRING: The complete build string of the package being built, including the hash (e.g. py311h21422ab_0).
• PKG_HASH: Represents the hash of the package being built, excluding the leading 'h' (e.g. 21422ab). This is applicable from conda-build 3.0 onwards.
• PYTHON: The path to the Python executable in the host prefix. Python is installed in the host prefix only when it is listed as a host requirement.
• R: The path to the R executable in the build prefix. R is installed in the build prefix only when it is listed as a build requirement.
• RECIPE_DIR: The directory where the recipe is located.
• SP_DIR: The location of Python's site-packages, where Python libraries are installed.
• SRC_DIR: The path to where the source code is unpacked or cloned. If the source file is not a recognized archive format, this directory contains a copy of the source file.
• STDLIB_DIR: The location of Python's standard library.
• build_platform: Represents the native subdirectory of the conda executable, indicating the platform for which the build is occurring.

Removed from conda-build are:

• NPY_VER
• PY3K
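
To illustrate a few of these variables working together, a minimal autotools-style build.sh might look like this (a sketch, not tied to a particular package):

build.sh
#!/bin/bash\n\n# install into the host prefix, using all available cores\n./configure --prefix=$PREFIX\nmake -j$CPU_COUNT\nmake install\n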

    "},{"location":"build_script/#windows","title":"Windows","text":"

    Unix-style packages on Windows are built in a special Library directory under the build prefix. The environment variables listed in the following table are defined only on Windows.

• LIBRARY_BIN: <build prefix>\Library\bin
• LIBRARY_INC: <build prefix>\Library\include
• LIBRARY_LIB: <build prefix>\Library\lib
• LIBRARY_PREFIX: <build prefix>\Library
• SCRIPTS: <build prefix>\Scripts

    Not yet supported in rattler-build:

    • CYGWIN_PREFIX
    • VS_MAJOR
    • VS_VERSION
    • VS_YEAR

    Additionally, the following variables are forwarded from the environment:

    • ALLUSERSPROFILE
    • APPDATA
    • CommonProgramFiles
    • CommonProgramFiles(x86)
    • CommonProgramW6432
    • COMPUTERNAME
    • ComSpec
    • HOMEDRIVE
    • HOMEPATH
    • LOCALAPPDATA
    • LOGONSERVER
    • NUMBER_OF_PROCESSORS
    • PATHEXT
    • ProgramData
    • ProgramFiles
    • ProgramFiles(x86)
    • ProgramW6432
    • PROMPT
    • PSModulePath
    • PUBLIC
    • SystemDrive
    • SystemRoot
    • TEMP
    • TMP
    • USERDOMAIN
    • USERNAME
    • USERPROFILE
    • windir
    • PROCESSOR_ARCHITEW6432
    • PROCESSOR_ARCHITECTURE
    • PROCESSOR_IDENTIFIER
    "},{"location":"build_script/#unix","title":"Unix","text":"

    The environment variables listed in the following table are defined only on macOS and Linux.

• HOME: Standard $HOME environment variable.
• PKG_CONFIG_PATH: Path to the pkgconfig directory; defaults to $PREFIX/lib/pkgconfig.
• SSL_CERT_FILE: Path to the SSL_CERT_FILE file.
• CFLAGS: Empty; can be forwarded from the environment to set additional arguments for the C compiler.
• CXXFLAGS: Same as CFLAGS, for the C++ compiler.
• LDFLAGS: Empty; additional flags to be passed to the linker when linking object files into an executable or shared object.
"},{"location":"build_script/#macos","title":"macOS","text":"

    The environment variables listed in the following table are defined only on macOS.

• MACOSX_DEPLOYMENT_TARGET: Same as the Anaconda Python macOS deployment target. Currently 10.9 for Intel 32- and 64-bit macOS, and 11.0 for arm64.
• OSX_ARCH: i386, x86_64, or arm64, depending on the target platform.
"},{"location":"build_script/#linux","title":"Linux","text":"

The environment variables listed in the following table are defined only on Linux.

• LD_RUN_PATH: Defaults to <build prefix>/lib.
• QEMU_LD_PREFIX: The prefix used by QEMU's user mode emulation for library paths.
• QEMU_UNAME: Set qemu uname release string to 'uname'.
• DEJAGNU: The path to the dejagnu testing framework used by the GCC test suite.
• DISPLAY: The X11 display to use for graphical applications.
• BUILD: Target triple ({build_arch}-conda_{build_distro}-linux-gnu), where build_distro is one of cos6 or cos7, for CentOS 6 or 7.
"},{"location":"cli_usage/","title":"CLI usage","text":""},{"location":"cli_usage/#shell-completions","title":"Shell Completions","text":"

    We support shell completions through clap_complete. You can generate them for your shell using the completion command.

    You can add the completions to your shell by adding the following to your shell's configuration file:

    # For bash (add this to ~/.bashrc)\neval \"$(rattler-build completion --shell=bash)\"\n# For zsh (add this to ~/.zshrc)\neval \"$(rattler-build completion --shell=zsh)\"\n# For fish (add this to ~/.config/fish/config.fish)\nrattler-build completion --shell=fish | source\n

Ensure that the location where rattler-build is installed is on your PATH; after that, you can use TAB (or any configured completion key) to complete commands.

$ rattler-build <TAB>\nbuild    -- Build a package\nhelp     -- Print this message or the help of the given subcommand(s)\nrebuild  -- Rebuild a package\ntest     -- Test a package\n
"},{"location":"cli_usage/#package-format","title":"Package format","text":"

You can specify the package format (either .tar.bz2 or .conda) by using the --package-format flag. You can also set the compression level with :<level> after the package format. The <level> can be max, min, default, or a number corresponding to the compression level. .tar.bz2 supports compression levels between 1 and 9, while .conda supports compression levels between -7 and 22. For .conda, you can also set the --compression-threads flag to specify the number of threads to use for compression.

# default\nrattler-build build --package-format tarbz2 -r recipe/recipe.yaml\n# maximum compression with 10 threads\nrattler-build build --package-format conda:max --compression-threads 10 -r recipe/recipe.yaml\n
    "},{"location":"cli_usage/#logs","title":"Logs","text":"

    rattler-build knows three different log styles: fancy, plain, and json. You can configure them with the --log-style=<style> flag:

    # default\nrattler-build build --log-style fancy -r recipe/recipe.yaml\n
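 
The other two styles follow the same pattern; for example, in CI you might prefer plain or machine-readable json output:

# plain logs (no fancy progress bars)\nrattler-build build --log-style plain -r recipe/recipe.yaml\n# machine-readable logs\nrattler-build build --log-style json -r recipe/recipe.yaml\n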
    "},{"location":"cli_usage/#github-integration","title":"GitHub integration","text":"

    rattler-build also has a GitHub integration. With this integration, warnings are automatically emitted in the GitHub Actions log and a summary is generated and posted to the GitHub Actions summary page.

    To make use of this integration, we recommend using our custom GitHub action: rattler-build-action. To manually enable it, you can set the environment variable RATTLER_BUILD_ENABLE_GITHUB_INTEGRATION=true.

    "},{"location":"compilers/","title":"Compilers and cross-compilation","text":"

    To use a compiler in your project, it's best to use the ${{ compiler('lang') }} template function. The compiler function works by taking a language, determining the configured compiler for that language, and adding some information about the target platform to the selected compiler. To configure a compiler for a specific language, the variant_config.yaml file can be used.

    For example, in a recipe that uses a C-compiler, you can use the following code:

    requirements:\n  build:\n    - ${{ compiler('c') }}\n

    To set the compiler that you want to use, create a variant config that looks like the following:

    c_compiler:\n  - gcc\n\n# optionally you can specify a version\nc_compiler_version:\n  - 9.3.0\n

When the template function is evaluated, it will turn into something like gcc_linux-64 9.3.0. You can also define your own compilers: for example, for Rust you can use ${{ compiler('rust') }} and set rust_compiler (and, optionally, rust_compiler_version) in your variant config.
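
Following the same pattern, a variant config for Rust might look like this (the version pin is only an illustrative assumption):

variant_config.yaml
rust_compiler:\n  - rust\n\n# optionally pin a compiler version\nrust_compiler_version:\n  - \"1.77\"\n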

    "},{"location":"compilers/#cross-compilation","title":"Cross-compilation","text":"

Cross-compilation is supported by rattler-build, and the compiler template function is part of what makes it possible. When you want to cross-compile from linux-64 to linux-aarch64 (i.e. Intel to ARM), you can pass --target-platform linux-aarch64 to the rattler-build command. This will cause the compiler template function to select a compiler that is configured for linux-aarch64. The above example would resolve to gcc_linux-aarch64 9.3.0. Provided that the compiler package is available for linux-64 (your build platform), the compilation should succeed.

    The distinction between the build and host sections begins to make sense when thinking about cross-compilation. The build environment is resolved to packages that need to run at compilation time. For example, cmake, gcc, and autotools are all tools that need to be executed. Therefore, the build environment resolves to packages for the linux-64 architecture (in our example). On the other hand, the host packages resolve to linux-aarch64 - those are packages that we want to link against.

    # packages that need to run at build time (cmake, gcc, autotools, etc.)\n# in the platform that rattler-build is executed on (the build_platform)\nbuild:\n  - cmake\n  - ${{ compiler('c') }}\n# packages that we want to link against in the architecture we are\n# cross-compiling to the target_platform\nhost:\n  - libcurl\n  - openssl\n
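
Putting this together, a cross-compiling build from linux-64 to linux-aarch64 is invoked like this (a sketch; the recipe path is a placeholder):

rattler-build build --recipe ./recipe.yaml --target-platform linux-aarch64\n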
    "},{"location":"experimental_features/","title":"Experimental features","text":"

    Warning

    These are experimental features of rattler-build and may change or go away completely.

    Currently only the build and rebuild commands support the following experimental features.

To enable them, use the --experimental flag with the command, or set the environment variable RATTLER_BUILD_EXPERIMENTAL=1.
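
For example (the recipe path is a placeholder):

# via the CLI flag\nrattler-build build --experimental --recipe recipe.yaml\n# or via the environment variable\nRATTLER_BUILD_EXPERIMENTAL=1 rattler-build build --recipe recipe.yaml\n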

    "},{"location":"experimental_features/#jinja-functions","title":"Jinja functions","text":""},{"location":"experimental_features/#load_from_filefile_path","title":"load_from_file(<file_path>)","text":"

The Jinja function load_from_file allows loading data from files: specifically, it parses toml, json, and yaml files into objects so that values can be fetched directly from the file contents. All other file types are loaded as strings.

    "},{"location":"experimental_features/#usage","title":"Usage","text":"

    load_from_file is useful when there is a project description in a well-defined project file such as Cargo.toml, package.json, pyproject.toml, package.yaml, or stack.yaml. It enables the recipe to be preserved in as simple a state as possible, especially when there is no need to keep the changes in sync; some example use cases for this are with CI/CD infrastructure or when there is a well-defined output format.

    Below is an example loading a Cargo.toml inside of the rattler-build GitHub repository:

    recipe.yaml
context:\n  name: ${{ load_from_file(\"Cargo.toml\").package.name }}\n  version: ${{ load_from_file(\"Cargo.toml\").package.version }}\n  source_url: ${{ load_from_file(\"Cargo.toml\").package.homepage }}\n  rust_toolchain: ${{ load_from_file(\"rust-toolchains\") }}\n\npackage:\n  name: ${{ name }}\n  version: ${{ version }}\n\nsource:\n  git: ${{ source_url }}\n  tag: ${{ source_tag }}\n\nrequirements:\n  build:\n    - rust ==${{ rust_toolchain }}\n\nbuild:\n  script: cargo build --release -p ${{ name }}\n\ntest:\n  - script: cargo test -p ${{ name }}\n  - script: cargo test -p rust-test -- --test-threads=1\n\nabout:\n  home: ${{ source_url }}\n  repository: ${{ source_url }}\n  documentation: ${{ load_from_file(\"Cargo.toml\").package.documentation }}\n  summary: ${{ load_from_file(\"Cargo.toml\").package.description }}\n  license: ${{ load_from_file(\"Cargo.toml\").package.license }}\n
    "},{"location":"experimental_features/#git-functions","title":"git functions","text":"

    git functions are useful for getting the latest tag and commit hash. These can be used in the context section of the recipe, to fetch version information from a repository.

    Examples
# latest tag in the repo\ngit.latest_tag(<git_repo_url>)\n\n# latest tag revision (i.e. the hash of the tag commit) in the repo\ngit.latest_tag_rev(<git_repo_url>)\n\n# latest commit revision (i.e. the hash of the head commit) in the repo\ngit.head_rev(<git_repo_url>)\n
    "},{"location":"experimental_features/#usage_1","title":"Usage","text":"

    These can be useful for automating minor things inside of the recipe itself, such as if the current version is the latest version or if the current hash is the latest hash, etc.

    recipe.yaml
    context:\n  git_repo_url: \"https://github.com/prefix-dev/rattler-build\"\n  latest_tag: ${{ git.latest_tag( git_repo_url ) }}\n\npackage:\n  name: \"rattler-build\"\n  version: ${{ latest_tag }}\n\nsource:\n  git: ${{ git_repo_url }}\n  tag: ${{ latest_tag }}\n

    There is currently no guarantee of caching for repo fetches when using git functions. This may lead to some performance issues.

    "},{"location":"highlevel/","title":"What is rattler-build?","text":"

    rattler-build is a tool to build and package software so that it can be installed on any operating system \u2013 with any compatible package manager such as mamba, conda, or rattler. We are also intending for rattler-build to be used as a library to drive builds of packages from any other recipe format in the future.

    "},{"location":"highlevel/#how-does-rattler-build-work","title":"How does rattler-build work?","text":"

    Building of packages consists of several steps. It all begins with a recipe.yaml file that specifies how the package is to be built and what the dependencies are. From the recipe file, rattler-build executes several steps:

    1. Rendering:

Parse the recipe file and evaluate conditionals, Jinja expressions, variables, and variants.

2. Fetch source:

    Retrieve specified source files, such as .tar.gz files, git repositories, local paths. Additionally, this step will apply patches that can be specified alongside the source file.

3. Install build environments:

    Download and install dependencies into temporary \"host\" and \"build\" workspaces. Any dependencies that are needed at build time are installed in this step.

4. Build source:

    Execute the build script to build/compile the source code and install it into the host environment.

5. Prepare package files:

    Collect all files that are new in the \"host\" environment and apply some transformations if necessary; specifically, we edit the rpath on Linux and macOS to make binaries relocatable.

6. Package:

    Bundle all the files in a package and write out any additional metadata into the info/index.json, info/about.json, and info/paths.json files. This also creates the test files that are bundled with the package.

7. Test:

Run any tests specified in the recipe. The package is considered done if it passes all the tests; otherwise it is moved to broken/ in the output directory.

After this process, a package is created. This package can be uploaded, for example, to a private or public channel on prefix.dev.

    "},{"location":"highlevel/#how-to-run-rattler-build","title":"How to run rattler-build","text":"

    Running rattler-build is straightforward. It can be done on the command line:

    rattler-build build --recipe myrecipe/recipe.yaml\n

    A custom channel that is not conda-forge (the default) can be specified like so:

    rattler-build build -c robostack --recipe myrecipe/recipe.yaml\n

    You can also use the --recipe-dir argument if you want to build all the packages in a directory:

    rattler-build build --recipe-dir myrecipes/\n
    "},{"location":"highlevel/#overview-of-a-recipeyaml","title":"Overview of a recipe.yaml","text":"

    A recipe.yaml file is separated into multiple sections and can conditionally include or exclude sections. Recipe files also support a limited amount of string interpolation with Jinja (specifically minijinja in our case).

    A simple example of a recipe file for the zlib package would look as follows:

    recipe.yaml
    # variables from the context section can be used in the rest of the recipe\n# in jinja expressions\ncontext:\n  version: 1.2.13\n\npackage:\n  name: zlib\n  version: ${{ version }}\n\nsource:\n  url: http://zlib.net/zlib-${{ version }}.tar.gz\n  sha256: b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30\n\nbuild:\n  # build numbers can be set arbitrarily\n  number: 0\n  script:\n    # build script to install the package into the $PREFIX (host prefix)\n    - if: unix\n      then:\n      - ./configure --prefix=$PREFIX\n      - make -j$CPU_COUNT\n    - if: win\n      then:\n      - cmake -G \"Ninja\" -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX%\n      - ninja install\n\nrequirements:\n  build:\n    # compiler is a special function.\n    - ${{ compiler(\"c\") }}\n    # The following two dependencies are only needed on Windows,\n    # and thus conditionally selected\n    - if: win\n      then:\n        - cmake\n        - ninja\n    - if: unix\n      then:\n        - make\n

    The sections of a recipe are:

• context: Defines variables that can be used in the Jinja context later in the recipe (e.g. name and version are commonly interpolated in strings)
• package: Defines the name and version of the package you are currently building; this will be the name of the final output
• source: Defines where the source code is going to be downloaded from, along with checksums
• build: Settings for the build and the build script
• requirements: Allows the definition of build, host, run, and run-constrained dependencies
"},{"location":"internals/","title":"Internals of rattler-build","text":""},{"location":"internals/#making-packages-relocatable-with-rattler-build","title":"Making Packages Relocatable with rattler-build","text":"

    Often, the most challenging aspect of building a package using rattler-build is making it relocatable. A relocatable package can be installed into any prefix, allowing it to be used outside the environment in which it was built. This is in contrast to a non-relocatable package, which can only be utilized within its original build environment.

    rattler-build automatically performs the following actions to make packages relocatable:

1. Binary object file conversion: Binary object files are converted to use relative paths using install_name_tool on macOS and patchelf on Linux. This uses $ORIGIN for ELF files on Linux and @loader_path for Mach-O files on macOS to make the rpath relative to the executable / shared library (see the sketch after this list).
2. Text file prefix registration: Any text file without NULL bytes that contains the placeholder prefix has the registered prefix replaced with the install prefix at install time.
3. Binary file prefix detection and registration: Binary files containing the build prefix can be automatically registered. The registered files will have their build prefix replaced with the install prefix at install time. This works by padding the install prefix with null terminators, such that the length of the binary file remains the same. The build prefix must be long enough to accommodate any reasonable installation prefix. On macOS and Linux, rattler-build pads the build prefix to 255 characters by appending _placehold to the end of the build directory name.
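
As a rough sketch of the first step (the exact invocations are internal to rattler-build, and the binary path here is hypothetical), the rpath rewrite is comparable to running:

patchelf --set-rpath '$ORIGIN/../lib' $PREFIX/bin/mytool  # Linux\ninstall_name_tool -add_rpath @loader_path/../lib $PREFIX/bin/mytool  # macOS\n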
    "},{"location":"internals/#what-goes-into-a-package","title":"What goes into a package?","text":"

    Generally speaking, any new files that are copied into the $PREFIX directory at build time are part of the new package. However, there is some filtering going on to exclude unwanted files, and noarch: python packages have special handling as well. The rules are as follows:

    "},{"location":"internals/#filtering","title":"Filtering","text":""},{"location":"internals/#general-file-filtering","title":"General File Filtering","text":"

    Certain files are filtered out to prevent them from being included in the package. These include:

    • .pyo files: Optimized Python files are not included because they are considered harmful.
    • .la files: Libtool archive files that are not needed at runtime.
    • .DS_Store files: macOS-specific files that are irrelevant to the package.
    • .git files and directories: Version control files, including .gitignore and the .git directory, which are not needed in the package.
• share/info/dir: this file is ignored because it would be written to by multiple packages.
    "},{"location":"internals/#special-handling-for-noarch-python-packages","title":"Special Handling for noarch: python Packages","text":"

    For packages marked as noarch: python, special transformations are applied to ensure compatibility across different platforms:

    • Stripping Python Library Prefix: The \"lib/pythonX.X\" prefix is removed, retaining only the \"site-packages\" part of the path.
    • Skipping __pycache__ Directories and .pyc Files: These are excluded and recreated during installation (they are specific to the Python version).
    • Replacing bin and Scripts Directories:
    • On Unix systems, the bin directory is replaced with python-scripts.
    • On Windows systems, the Scripts directory is replaced with python-scripts.
    • Remove explicitly mentioned entrypoints: For noarch: python packages, entry points registered in the package are also taken into account. Files in the bin or Scripts directories that match entry points are excluded to avoid duplications.
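
As a hypothetical illustration of these rules, files recorded in the host prefix at build time end up at the following locations inside a noarch: python package:

lib/python3.10/site-packages/mypkg/__init__.py  ->  site-packages/mypkg/__init__.py\nbin/mypkg-cli  ->  python-scripts/mypkg-cli\n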
    "},{"location":"internals/#symlink-handling","title":"Symlink Handling","text":"

    Symlinks are carefully managed to ensure they are relative rather than absolute, which aids in making the package relocatable:

    • Absolute symlinks pointing within the $PREFIX are converted to relative symlinks.
    • On Unix systems, this conversion is handled directly by creating new relative symlinks.
    • On Windows, a warning is issued since symlink creation requires administrator privileges.
    "},{"location":"package_spec/","title":"Package specification","text":"

    rattler-build produces \"conda\" packages. These packages work with the mamba and conda package managers, and they work cross-platform on Windows, Linux and macOS.

    By default, a conda package is a tar.bz2 archive which contains:

    • Metadata under the info/ directory
    • A collection of files that are installed directly into an install prefix

    The format is identical across platforms and operating systems. During the install process, all files are extracted into the install prefix, except the ones in info/. Installing a conda package into an environment is similar to executing the following commands:

    cd <environment prefix>\ntar xjf mypkg-1.0.0-h2134.tar.bz2\n

    Only files, including symbolic links, are part of a conda package. Directories are not included. Directories are created and removed as needed, but you cannot create an empty directory from the tar archive directly.

    There is also a newer archive type, suffixed with .conda. This archive type consists of an outer \"zip\" archive that is not compressed, and two inner archives that are compressed with zstd, which is very fast for decompression.

    The inner archives are split into info and pkg files, which makes it possible to extract only the info part of the archive (only the metadata), which is often smaller in size.
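
Because the outer archive is a regular zip file, its contents can be listed with any zip tool. A hypothetical package would contain:

$ unzip -l mypkg-1.0.0-h2134_0.conda\nmetadata.json\ninfo-mypkg-1.0.0-h2134_0.tar.zst\npkg-mypkg-1.0.0-h2134_0.tar.zst\n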

    "},{"location":"package_spec/#package-filename","title":"Package filename","text":"

    A conda package conforms to the following filename:

    <name>-<version>-<hash>.tar.bz2 OR <name>-<version>-<hash>.conda\n
    "},{"location":"package_spec/#special-files-in-packages","title":"Special files in packages","text":"

    There are some special files in a package:

    • activation and deactivation scripts that are executed when the environment is activated or deactivated
    • post-link and pre-unlink scripts that are executed when the package is installed or uninstalled

    You can read more about these files in the activation scripts and other special files section.

    "},{"location":"package_spec/#package-metadata","title":"Package metadata","text":"

    The info/ directory contains all metadata about a package. Files in this location are not installed under the install prefix. Although you are free to add any file to this directory, conda only inspects the content of the files discussed below:

    "},{"location":"package_spec/#infoindexjson","title":"info/index.json","text":"

    This file contains basic information about the package, such as name, version, build string, and dependencies. The content of this file is stored in repodata.json, which is the repository index file, hence the name index.json. The JSON object is a dictionary containing the keys shown below.

    name: string

    The lowercase name of the package. May contain lowercase characters, underscores, and dashes.

    version: string

The package version. May not contain \"-\". Follows PEP 440.

    build: string

    The build string. May not contain \"-\". Differentiates builds of packages with otherwise identical names and versions, such as:

    • A build with other dependencies, such as Python 3.4 instead of Python 2.7.
    • A bug fix in the build process.
• Some different optional dependencies, such as MKL versus ATLAS linkage.

Nothing in conda actually inspects the build string. Strings such as np18py34_1 are designed only for human readability and conda never parses them.

    build_number: integer

    A non-negative integer representing the build number of the package. Unlike the build string, the build_number is inspected by conda. Conda uses it to sort packages that have otherwise identical names and versions to determine the latest one. This is important because new builds that contain bug fixes for the way a package is built may be added to a repository.

    depends: list of match specs

    A list of dependency specifications, where each element is a string. These come from the run section of the recipe or any run exports of dependencies.

    constrains: list of match specs

    A list of optional dependency constraints. The packages listed under constrains are not installed by default, but if they are installed they have to respect the constraints.

    subdir: string

    The subdir (like linux-64) of this package.

    arch: string

Optional. The architecture the package is built for. EXAMPLE: x86_64. This key is generally not used (duplicate information from subdir).

    platform: string

Optional. The OS that the package is built for, e.g. osx. This key is generally not used (duplicate information from subdir).
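
A minimal, hypothetical info/index.json illustrating the keys above could look like this:

{\n  \"name\": \"zlib\",\n  \"version\": \"1.2.13\",\n  \"build\": \"h1234567_0\",\n  \"build_number\": 0,\n  \"depends\": [\"libgcc-ng >=12\"],\n  \"constrains\": [],\n  \"subdir\": \"linux-64\"\n}\n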

    "},{"location":"package_spec/#infopathsjson","title":"info/paths.json","text":"

    The paths.json file lists all files that are installed into the environment.

    It consists of a list of path entries, each with the following keys:

    _path: string

    The relative path of the file

    path_type: optional, string

The type of linking; one of hardlink, softlink, or directory. Defaults to hardlink.

file_mode: optional, string

    The file mode can be binary or text. This is only relevant for prefix replacement.

    prefix_placeholder: optional, string

    The prefix placeholder string that is encoded in the text or binary file, which is replaced at installation time. Note that this prefix placeholder uses / even on Windows.

    no_link: bool, optional

    Determines whether this file should be linked or not when installing the package (linking the file from the cache into the environment). Defaults to false.

    sha256: string

    The SHA256 hash of the file. For symbolic links it contains the SHA256 hash of the file pointed to.

    size_in_bytes: number

    The size, in bytes, of the file. For symbolic links, it contains the file size of the file pointed to.

    Due to the way the binary replacement works, the placeholder prefix must be longer than the install prefix.
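
A minimal, hypothetical entry in info/paths.json could look like this (the hash is shortened for readability):

{\n  \"paths\": [\n    {\n      \"_path\": \"bin/mytool\",\n      \"path_type\": \"hardlink\",\n      \"file_mode\": \"binary\",\n      \"prefix_placeholder\": \"/path/to/build_env_placehold_placehold\",\n      \"sha256\": \"e3b0c442...\",\n      \"size_in_bytes\": 12345\n    }\n  ],\n  \"paths_version\": 1\n}\n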

    "},{"location":"package_spec/#infolicense","title":"info/license/<...>","text":"

    All licenses mentioned in the recipe are copied to this folder.

    "},{"location":"package_spec/#infoaboutjson","title":"info/about.json","text":"

Optional file. Contains the entries of the \"about\" section of the recipe.yaml file. The following keys are added to info/about.json if present in the build recipe:

    Renamed fields

    The new recipe spec renamed a few fields (from conda-build's original implementation). This means that some fields in the about.json file still have the old names (for backwards compatibility), while you would generally use different names in the recipe.

    home: url (from about.homepage)

    The URL of the homepage of the package.

    dev_url: url (from about.repository)

    The URL of the development repository of the package.

    doc_url: url (from about.documentation)

    The URL of the documentation of the package.

    license_url: url

    The URL of the license of the package.

    license: string (from about.license)

    The SPDX license identifier of the package.

    summary: string

    A short summary of the package.

    description: string

    A longer description of the package.

    license_family: string

    (this field is not used anymore as we rely on SPDX license identifiers)

    "},{"location":"package_spec/#inforecipe","title":"info/recipe/<...>","text":"

    A directory containing the full contents of the build recipe. This folder also contains a rendered version of the recipe (rendered_recipe.yaml). This rendered version is used for the rebuild command. However, note that currently this format is still in flux and can change at any time.

    You can also use --no-include-recipe to disable the inclusion of the recipe in the package.

    "},{"location":"rebuild/","title":"Rebuilding a package","text":"

    The rebuild command allows you to rebuild a package from an existing package. The main use case is to examine if a package can be rebuilt in a reproducible manner. You can read more about reproducible builds here.

    "},{"location":"rebuild/#usage","title":"Usage","text":"
    rattler-build rebuild ./mypkg-0.1.0-h60d57d3_0.tar.bz2\n
    "},{"location":"rebuild/#how-it-works","title":"How it works","text":"

    The recipe is \"rendered\" and stored into the package. The way the recipe is rendered is subject to change. For the moment, the rendered recipe is stored as info/recipe/rendered_recipe.yaml. It includes the exact package versions that were used at build time. When rebuilding, we use the package resolutions from the rendered recipe, and execute the same build script as the original package.

We also take great care to sort files in a deterministic manner and to erase any timestamps. The SOURCE_DATE_EPOCH environment variable is set to the same timestamp as the original build for additional determinism (some build tools use this variable to set timestamps).

    "},{"location":"rebuild/#how-to-check-the-reproducibility-of-a-package","title":"How to check the reproducibility of a package","text":"

    There is an excellent tool called diffoscope that allows you to compare two packages and see the differences. You can install it with pixi:

    pixi global install diffoscope\n

    To compare two packages, you can use the following command:

    rattler-build rebuild ./build0.tar.bz2\ndiffoscope ./build0.tar.bz2 ./mypkg-0.1.0-h60d57d3_0.tar.bz2\n
    "},{"location":"selectors/","title":"Selectors in recipes","text":"

    Recipe and variant configuration files can utilize selectors to conditionally add, remove, or modify dependencies, configuration options, or even skip recipe execution based on specific conditions.

    Selectors are implemented using a simple if / then / else map, which is a valid YAML dictionary. The condition is evaluated using minijinja and follows the same syntax as a Python expression.

    During rendering, several variables are set based on the platform and variant being built. For example, the unix variable is true for macOS and Linux, while win is true for Windows. Consider the following recipe executed on Linux:

    requirements:\n  host:\n    - if: unix\n      then: unix-tool\n    - if: win\n      then: win-tool\n

    This will be evaluated as:

    requirements:\n  host:\n    - unix-tool\n

    The line containing the Windows-specific configuration is removed. Multiple items can also be selected, such as:

    host:\n  - if: linux\n    then:\n    - linux-tool-1\n    - linux-tool-2\n    - linux-tool-3\n

    For Linux, this will result in:

    host:\n  - linux-tool-1\n  - linux-tool-2\n  - linux-tool-3\n

    Other examples often found in the wild:

if: build_platform != target_platform ... # true if cross-platform build\nif: osx and arm64 ... # true for Apple silicon (osx-arm64)\nif: linux and (aarch64 or ppc64le) ... # true for linux-aarch64 or linux-ppc64le\n
    "},{"location":"selectors/#available-variables","title":"Available variables","text":"

    The following variables are available during the initial rendering and afterward:

| Variable | Description |
| --- | --- |
| target_platform | the configured target_platform for the build |
| build_platform | the build platform |
| linux | \"true\" if target_platform is Linux |
| osx | \"true\" if target_platform is OSX / macOS |
| win | \"true\" if target_platform is Windows |
| unix | \"true\" if target_platform is a Unix (macOS or Linux) |
| x86_64, x86, arm64, ... | the architecture (\"x86_64\" for 64 bit, \"x86\" for 32 bit, otherwise arm64, aarch64, ppc64le, etc.) |

After the initial phase, when the variant configuration is selected, the variant values are also available in selectors. For example, if the build uses python: 3.8 as a variant, we can use if: python == \"3.8\" to enable a dependency only when the Python version is 3.8.
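
For example (the package name in the then branch is hypothetical):

requirements:\n  run:\n    - if: python == \"3.8\"\n      then:\n        - backports.zoneinfo\n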

    "},{"location":"selectors/#the-cmp-function","title":"The cmp function","text":"

Inside selectors, one can use the special cmp function to test whether the selected variant version matches a version spec. For example, if we again have a python: 3.8 variant, we could use the following tests:

    - if: cmp(python, \"3.8\")    # true\n  then: mydep\n- if: cmp(python, \">=3.8\")  # true\n  then: mydep\n- if: cmp(python, \"<3.8\")   # false\n  then: mydep\n

    This function eliminates the need to implement any Python-specific conda-build selectors (such as py3k, py38, etc.) or the py and npy integers.

    Please note that during the initial phase of rendering we do not know the variant, and thus the cmp condition always evaluates to true.

    "},{"location":"special_files/","title":"Activation scripts and other special files","text":"

    A conda package can contain \"special\" files in the prefix. These files are scripts that are executed during activation, installation, or uninstallation process.

    If possible, they should be avoided since they execute arbitrary code at installation time and slow down the installation and activation process.

    "},{"location":"special_files/#activation-scripts","title":"Activation scripts","text":"

    The activation scripts are executed when the environment containing the package is activated (e.g. when doing micromamba activate myenv or pixi run ...).

    The scripts are located in special folders:

• etc/conda/activate.d/{script.sh/bat} - scripts in this folder are executed when the environment is activated
    • etc/conda/deactivate.d/{script.sh/bat} - scripts in this folder are executed when the environment is deactivated

    The scripts are executed in lexicographical order, so you can prefix them with numbers to control the order of execution.

    To add a script to the package, just make sure that you install the file in this folder. For example, on Linux:

    mkdir -p $PREFIX/etc/conda/activate.d\ncp activate-mypkg.sh $PREFIX/etc/conda/activate.d/10-activate-mypkg.sh\n\nmkdir -p $PREFIX/etc/conda/deactivate.d\ncp deactivate-mypkg.sh $PREFIX/etc/conda/deactivate.d/10-deactivate-mypkg.sh\n
    "},{"location":"special_files/#post-link-and-pre-unlink-scripts","title":"Post-link and pre-unlink scripts","text":"

    The post-link and pre-unlink scripts are executed when the package is installed or uninstalled. They are both heavily discouraged and currently not implemented in rattler, rattler-build, and pixi.

    To create a post-link script for your package, you need to add <package_name>-post-link.{sh/bat} to the bin/ folder of your package. The same is applicable for pre-unlink scripts, just with the name <package_name>-pre-unlink.{sh/bat}.

    For example, for mypkg, you would add mypkg-post-link.sh to the bin/ folder of your package.
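
A minimal sketch of the corresponding build script step (the script name and contents are hypothetical):

mkdir -p $PREFIX/bin\ncp post-link.sh $PREFIX/bin/mypkg-post-link.sh\n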

    "},{"location":"testing/","title":"Testing packages","text":"

    When you are developing a package, you should write tests for it. The tests are automatically executed right after the package build has finished.

    The tests from the test section are actually packaged into your package and can also be executed straight from the existing package.

    The idea behind adding the tests into the package is that you can execute the tests independently from building the package. That is also why we are shipping a test subcommand that takes as input an existing package and executes the tests:

    rattler-build test --package-file ./xtensor-0.24.6-h60d57d3_0.tar.bz2\n

    Running the above command will extract the package and create a clean environment where the package and dependencies are installed. Then the tests are executed in this newly-created environment.

    If you inspect the package contents, you would find the test files under info/test/*.

    "},{"location":"testing/#how-tests-are-translated","title":"How tests are translated","text":"

    The tests section allows you to specify the following things:

tests:\n  - script:\n      # commands to run to test the package. If any of the commands\n      # returns with an error code, the test is considered failed.\n      - echo \"Hello world\"\n      - pytest ./tests\n\n    # additional requirements at test time\n    requirements:\n      run:\n        - pytest\n\n    files:\n      # Extra files to be copied to the test directory from the \"work directory\"\n      source:\n        - tests/\n        - test.py\n        - \"*.sh\"\n      recipe:\n        - more_tests/*.py\n\n  # This test section tries to import the Python modules and errors if it can't\n  - python:\n      imports:\n        - mypkg\n        - mypkg.subpkg\n

    When you are writing a test for your package, additional files are created and added to your package. These files are placed under the info/tests/{index}/ folder for each test.

    For a script test:

    • All the files are copied straight into the test folder (under info/tests/{index}/)
    • The script is turned into a run_test.sh or run_test.bat file
    • The extra requirements are stored as a JSON file called test_time_dependencies.json

    For a Python import test:

• A JSON file called python_test.json is created that stores the imports to be tested and whether to execute pip check. This file is placed under info/tests/{index}/

    For a downstream test:

• A JSON file called downstream_test.json is created that stores the downstream tests to be executed. This file is placed under info/tests/{index}/ (see the illustration below)
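
For illustration, a package with one script test and one Python import test might contain the following files (hypothetical layout):

info/tests/0/run_test.sh\ninfo/tests/0/test_time_dependencies.json\ninfo/tests/1/python_test.json\n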
    "},{"location":"testing/#legacy-tests","title":"Legacy tests","text":"

    Legacy tests (from conda-build) are still supported for execution. These tests are stored as files under the info/test/ folder.

    The files are:

    • run_test.sh (Unix)
    • run_test.bat (Windows)
    • run_test.py (for the Python import tests)
    • test_time_dependencies.json (for additional dependencies at test time)

    Additionally, the info/test/ folder contains all the files specified in the test section as source_files and files. The tests are executed pointing to this directory as the current working directory.

    "},{"location":"tui/","title":"Terminal User Interface","text":"

    rattler-build offers a terminal user interface for building multiple packages and viewing the logs.

    To launch the TUI, run the build command with the --tui flag as shown below:

    $ rattler-build build -r recipe.yaml --tui\n

    Note

rattler-build-tui is gated behind the tui feature flag to avoid extra dependencies. Build the project with the --features tui argument to enable the TUI functionality.

    "},{"location":"tui/#key-bindings","title":"Key Bindings","text":"Key Action \u23ce Build a Build all j/k Next/previous package up/down/left/right Scroll logs e Edit recipe (via $EDITOR) c, : Open command prompt (available commands: edit) q, ctrl-c, esc, Quit"},{"location":"variants/","title":"Variant configuration","text":"

    rattler-build can automatically build multiple variants of a given package. For example, a Python package might need multiple variants per Python version (especially if it is a binary package such as numpy).

    For this use case, one can specify variant configuration files. A variant configuration file has 2 special entries and a list of packages with variants. For example:

    variants.yaml
    # special entry #1, the zip keys\nzip_keys:\n- [python, numpy]\n\n# special entry #2, the pin_run_as_build key\npin_run_as_build:\n  numpy:\n    max_pin: 'x.x'\n\n# entries per package version that users are interested in\npython:\n# Note that versions are _strings_ (not numbers)\n- \"3.8\"\n- \"3.9\"\n- \"3.10\"\n\nnumpy:\n- \"1.12\"\n- \"1.12\"\n- \"1.20\"\n

    We can pass a variant configuration file to rattler-build using a command like the following:

    rattler-build build --variant-config ./variants.yaml --recipe myrecipe.yaml\n

If we have a recipe that has a build, host, or run dependency on python, we will build multiple variants of this package, one for each configured python version (\"3.8\", \"3.9\", and \"3.10\").

    For example:

    # ...\nrequirements:\n  host:\n  - python\n

    ... will be rendered as (for the first variant):

# ...\nrequirements:\n  host:\n  - python 3.8*\n

    Note that variants are only applied if the requirement doesn't specify any constraints. If the requirement would be python >3.8,<3.10 then the variant entry would be ignored.

    "},{"location":"variants/#package-hash-from-variant","title":"Package hash from variant","text":"

    You might have wondered what the role of the build string is. The build string is (if not explicitly set) computed from the variant configuration. It serves as a mechanism to discern different build configurations that produce a package with the same name and version.

    The hash is computed by dumping all of the variant configuration values that are used by a given recipe into a JSON file, and then hashing that JSON file.

    For example, in our python example, we would get a variant configuration file that looks something like:

    {\n    \"python\": \"3.8\"\n}\n

This JSON string is then hashed with the MD5 hash algorithm, and produces the hash. For certain packages (such as Python packages) special rules exist, and the py<Major.Minor> version is prepended to the hash, so that the final hash would look something like py38h123123.
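
A rough sketch of the idea on the command line (not the exact implementation; serialization details and hash truncation are assumptions):

echo -n '{\"python\": \"3.8\"}' | md5sum\n# the first hex characters of the digest become the hash part of the\n# build string, e.g. py38h1a2b3c4_0 (hypothetical)\n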

    "},{"location":"variants/#zip-keys","title":"Zip keys","text":"

    Zip keys modify how variants are combined. Usually, each variant key that has multiple entries is expanded to a build matrix. For example, if we have:

    python: [\"3.8\", \"3.9\"]\nnumpy: [\"1.12\", \"1.14\"]\n

    ...then we obtain 4 variants for a recipe that uses both numpy and python:

    - python 3.8, numpy 1.12\n- python 3.8, numpy 1.14\n- python 3.9, numpy 1.12\n- python 3.9, numpy 1.14\n

    However, if we use the zip_keys and specify:

    zip_keys: [\"python\", \"numpy\"]\npython: [\"3.8\", \"3.9\"]\nnumpy: [\"1.12\", \"1.14\"]\n

    ...then the versions are \"zipped up\" and we only get 2 variants. Note that both python and numpy need to specify the exact same number of versions to make this work.

    The resulting variants with the zip applied are:

    - python 3.8, numpy 1.12\n- python 3.9, numpy 1.14\n
    "},{"location":"variants/#pin-run-as-build","title":"Pin run as build","text":"

    The pin_run_as_build key allows the user to inject additional pins. Usually, the run_exports mechanism is used to specify constraints for runtime dependencies from build time dependencies, but pin_run_as_build offers a mechanism to override that if the package does not contain a run exports file.

    For example:

    pin_run_as_build:\n  libcurl:\n    min_pin: 'x'\n    max_pin: 'x'\n

    If we now have a recipe that uses libcurl in the host and run dependencies like:

    requirements:\n  host:\n  - libcurl\n  run:\n  - libcurl\n

    During resolution, libcurl might be evaluated to libcurl 8.0.1 h13284. Our new runtime dependency then looks like:

    requirements:\n  host:\n  - libcurl 8.0.1 h13284\n  run:\n  - libcurl >=8,<9\n
    "},{"location":"variants/#prioritizing-variants","title":"Prioritizing variants","text":"

    You might produce multiple variants for a package, but want to define a priority for a given variant. The variant with the highest priority would be the default package that is selected by the resolver.

    There are two mechanisms to make this possible: mutex packages and the down_prioritize_variant option in the recipe.

    "},{"location":"variants/#the-down_prioritize_variant-option","title":"The down_prioritize_variant option","text":"

    Note

    It is not always necessary to use the down_prioritize_variant option - only if the solver has no other way to prefer a given variant. For example, if you have a package that has multiple variants for different Python versions, the solver will automatically prefer the variant with the highest Python version.

    The down_prioritize_variant option allows you to specify a variant that should be down-prioritized. For example:

    recipe.yaml
    build:\n  variant_config:\n    use_keys:\n      # use cuda from the variant config, e.g. to build multiple CUDA variants\n      - cuda\n    # this will down-prioritize the cuda variant versus other variants of the package\n    down_prioritize_variant: ${{ 1 if cuda else 0 }}\n
    "},{"location":"variants/#mutex-packages","title":"Mutex packages","text":"

Another way to make sure the right variants are selected is \"mutex\" packages. A mutex package is a package that is mutually exclusive across variants: we use the fact that only one package of a given name can be installed at a time (the solver has to choose).

A mutex package can be useful to make sure that all packages that depend on BLAS are compiled against the same BLAS implementation. The mutex package ensures that \"openblas\" and \"mkl\" can never be installed at the same time.

    We could define a BLAS mutex package like this:

    variant_config.yaml
    blas_variant:\n  - \"openblas\"\n  - \"mkl\"\n

    And then the recipe.yaml for the mutex package could look like this:

    recipe.yaml
    package:\n  name: blas_mutex\n  version: 1.0\n\nbuild:\n  string: ${{ blas_variant }}${{ hash }}_${{ build_number }}\n  variant_config:\n    # make sure that `openblas` is preferred over `mkl`\n    down_prioritize_variant: ${{ 1 if blas_variant == \"mkl\" else 0 }}\n

This will create two packages: blas_mutex-1.0-openblas and blas_mutex-1.0-mkl. Only one of these packages can be installed at a time because they share the same name; the solver will therefore only select one of the two.

The blas package in turn should have a run_exports entry for the blas_mutex package, so that any package that links against blas also has a dependency on the correct blas_mutex package:

    recipe.yaml
package:\n  name: openblas\n  version: 1.0\n\nrequirements:\n  # any package depending on openblas should also depend on the correct blas_mutex package\n  run_exports:\n    # Add a run export on _any_ version of the blas_mutex package whose build string starts with \"openblas\"\n    - blas_mutex * openblas*\n

    Then the recipe of a package that wants to build two variants, one for openblas and one for mkl could look like this:

    recipe.yaml
    package:\n  name: fastnumerics\n  version: 1.0\n\nrequirements:\n  host:\n    # build against both openblas and mkl\n    - ${{ blas_variant }}\n  run:\n    # implicitly adds the correct blas_mutex package through run exports\n    # - blas_mutex * ${{ blas_variant }}*\n
    "},{"location":"reference/cli/","title":"Command-Line Help for rattler-build","text":"

    This document contains the help content for the rattler-build command-line program.

    "},{"location":"reference/cli/#rattler-build","title":"rattler-build","text":"

    Usage: rattler-build [OPTIONS] [COMMAND]

    "},{"location":"reference/cli/#subcommands","title":"Subcommands:","text":"
    • build \u2014 Build a package from a recipe
    • test \u2014 Run a test for a single package
    • rebuild \u2014 Rebuild a package from a package file instead of a recipe
    • upload \u2014 Upload a package
    • completion \u2014 Generate shell completion script
    • generate-recipe \u2014 Generate a recipe from PyPI or CRAN
    • auth \u2014 Handle authentication to external channels
    "},{"location":"reference/cli/#options","title":"Options:","text":"
    • -v, --verbose

      Increase logging verbosity

    • -q, --quiet

      Decrease logging verbosity

    • --log-style <LOG_STYLE>

      Logging style

      • Default value: fancy
      • Possible values:
        • fancy: Use fancy logging output
        • json: Use JSON logging output
        • plain: Use plain logging output
    • --color <COLOR>

Enable or disable colored output from rattler-build. Also honors the CLICOLOR and CLICOLOR_FORCE environment variables

      • Default value: auto
      • Possible values:
        • always: Always use colors
        • never: Never use colors
        • auto: Use colors when the output is a terminal
    "},{"location":"reference/cli/#build","title":"build","text":"

    Build a package from a recipe

    Usage: rattler-build build [OPTIONS]

    "},{"location":"reference/cli/#options_1","title":"Options:","text":"
    • -r, --recipe <RECIPE>

      The recipe file or directory containing recipe.yaml. Defaults to the current directory

      • Default value: .
    • --recipe-dir <RECIPE_DIR>

      The directory that contains recipes

    • --up-to <UP_TO>

      Build recipes up to the specified package

    • --build-platform <BUILD_PLATFORM>

      The build platform to use for the build (e.g. for building with emulation, or rendering)

      • Default value: linux-64
    • --target-platform <TARGET_PLATFORM>

      The target platform for the build

      • Default value: linux-64
    • -c, --channel <CHANNEL>

      Add a channel to search for dependencies in

      • Default value: conda-forge
    • -m, --variant-config <VARIANT_CONFIG>

      Variant configuration files for the build

    • --render-only

      Render the recipe files without executing the build

      • Possible values: true, false
    • --with-solve

      Render the recipe files with solving dependencies

      • Possible values: true, false
    • --keep-build

      Keep intermediate build artifacts after the build

      • Possible values: true, false
    • --no-build-id

Don't use the build id (timestamp) when creating the build directory name

      • Possible values: true, false
    • --compression-threads <COMPRESSION_THREADS>

      The number of threads to use for compression (only relevant when also using --package-format conda)

    • --use-zstd

      Enable support for repodata.json.zst

      • Default value: true
      • Possible values: true, false
    • --use-bz2

      Enable support for repodata.json.bz2

      • Default value: true
      • Possible values: true, false
    • --experimental

      Enable experimental features

      • Possible values: true, false
    • --auth-file <AUTH_FILE>

      Path to an auth-file to read authentication information from

    • --tui

      Launch the terminal user interface

      • Default value: false
      • Possible values: true, false
    "},{"location":"reference/cli/#modifying-result","title":"Modifying result","text":"
    • --package-format <PACKAGE_FORMAT>

      The package format to use for the build. Can be one of tar-bz2 or conda. You can also add a compression level to the package format, e.g. tar-bz2:<number> (from 1 to 9) or conda:<number> (from -7 to 22).

      • Default value: conda
    • --no-include-recipe

      Don't store the recipe in the final package

      • Possible values: true, false
    • --no-test

      Don't run the tests after building the package

      • Default value: false
      • Possible values: true, false
    • --color-build-log

      Don't force colors in the output of the build script

      • Default value: true
      • Possible values: true, false
    • --output-dir <OUTPUT_DIR>

      Output directory for build artifacts.

      • Default value: ./output
    • --skip-existing <SKIP_EXISTING>

Whether to skip packages that already exist in any channel. If set to none, do not skip any packages (the default when the flag is not given). If set to local, only skip packages that already exist locally (the default when using --skip-existing without a value). If set to all, skip packages that already exist in any channel

      • Default value: none
      • Possible values:
        • none: Do not skip any packages
        • local: Skip packages that already exist locally
        • all: Skip packages that already exist in any channel
    "},{"location":"reference/cli/#test","title":"test","text":"

    Run a test for a single package

    This creates a temporary directory, copies the package file into it, and then runs the indexing. It then creates a test environment that installs the package and any extra dependencies specified in the package test dependencies file.

    With the activated test environment, the packaged test files are run:

• info/test/run_test.sh (or info/test/run_test.bat on Windows)
• info/test/run_test.py

    These test files are written at \"package creation time\" and are part of the package.

    Usage: rattler-build test [OPTIONS] --package-file <PACKAGE_FILE>

    "},{"location":"reference/cli/#options_2","title":"Options:","text":"
    • -c, --channel <CHANNEL>

      Channels to use when testing

    • -p, --package-file <PACKAGE_FILE>

      The package file to test

    • --use-zstd

      Enable support for repodata.json.zst

      • Default value: true
      • Possible values: true, false
    • --use-bz2

      Enable support for repodata.json.bz2

      • Default value: true
      • Possible values: true, false
    • --experimental

      Enable experimental features

      • Possible values: true, false
    • --auth-file <AUTH_FILE>

      Path to an auth-file to read authentication information from

    "},{"location":"reference/cli/#modifying-result_1","title":"Modifying result","text":"
    • --output-dir <OUTPUT_DIR>

      Output directory for build artifacts.

      • Default value: ./output
    "},{"location":"reference/cli/#rebuild","title":"rebuild","text":"

    Rebuild a package from a package file instead of a recipe

    Usage: rattler-build rebuild [OPTIONS] --package-file <PACKAGE_FILE>

    "},{"location":"reference/cli/#options_3","title":"Options:","text":"
    • -p, --package-file <PACKAGE_FILE>

      The package file to rebuild

    • --no-test

      Do not run tests after building

      • Default value: false
      • Possible values: true, false
    • --use-zstd

      Enable support for repodata.json.zst

      • Default value: true
      • Possible values: true, false
    • --use-bz2

      Enable support for repodata.json.bz2

      • Default value: true
      • Possible values: true, false
    • --experimental

      Enable experimental features

      • Possible values: true, false
    • --auth-file <AUTH_FILE>

      Path to an auth-file to read authentication information from

    "},{"location":"reference/cli/#modifying-result_2","title":"Modifying result","text":"
    • --output-dir <OUTPUT_DIR>

      Output directory for build artifacts.

      • Default value: ./output
    "},{"location":"reference/cli/#upload","title":"upload","text":"

    Upload a package

    Usage: rattler-build upload [OPTIONS] [PACKAGE_FILES]... <COMMAND>

    "},{"location":"reference/cli/#subcommands_1","title":"Subcommands:","text":"
• quetz \u2014 Upload to a Quetz server. Authentication is used from the keychain / auth-file
• artifactory \u2014 Options for uploading to an Artifactory channel. Authentication is used from the keychain / auth-file
• prefix \u2014 Options for uploading to a prefix.dev server. Authentication is used from the keychain / auth-file
• anaconda \u2014 Options for uploading to an Anaconda.org server
    "},{"location":"reference/cli/#arguments","title":"Arguments:","text":"
    • <PACKAGE_FILES>

The package file(s) to upload

    "},{"location":"reference/cli/#options_4","title":"Options:","text":"
    • --use-zstd

      Enable support for repodata.json.zst

      • Default value: true
      • Possible values: true, false
    • --use-bz2

      Enable support for repodata.json.bz2

      • Default value: true
      • Possible values: true, false
    • --experimental

      Enable experimental features

      • Possible values: true, false
    • --auth-file <AUTH_FILE>

      Path to an auth-file to read authentication information from

    "},{"location":"reference/cli/#modifying-result_3","title":"Modifying result","text":"
    • --output-dir <OUTPUT_DIR>

      Output directory for build artifacts.

      • Default value: ./output
    "},{"location":"reference/cli/#quetz","title":"quetz","text":"

Upload to a Quetz server. Authentication is used from the keychain / auth-file

    Usage: rattler-build upload quetz [OPTIONS] --url <URL> --channel <CHANNEL>

    "},{"location":"reference/cli/#options_5","title":"Options:","text":"
    • -u, --url <URL>

      The URL to your Quetz server

    • -c, --channel <CHANNEL>

      The URL to your channel

    • -a, --api-key <API_KEY>

      The Quetz API key, if none is provided, the token is read from the keychain / auth-file

    "},{"location":"reference/cli/#artifactory","title":"artifactory","text":"

Options for uploading to an Artifactory channel. Authentication is used from the keychain / auth-file

    Usage: rattler-build upload artifactory [OPTIONS] --url <URL> --channel <CHANNEL>

    "},{"location":"reference/cli/#options_6","title":"Options:","text":"
    • -u, --url <URL>

      The URL to your Artifactory server

    • -c, --channel <CHANNEL>

      The URL to your channel

    • -r, --username <USERNAME>

      Your Artifactory username

    • -p, --password <PASSWORD>

      Your Artifactory password

    "},{"location":"reference/cli/#prefix","title":"prefix","text":"

    Options for uploading to a prefix.dev server. Authentication is used from the keychain / auth-file

    Usage: rattler-build upload prefix [OPTIONS] --channel <CHANNEL>

    "},{"location":"reference/cli/#options_7","title":"Options:","text":"
    • -u, --url <URL>

      The URL to the prefix.dev server (only necessary for self-hosted instances)

      • Default value: https://prefix.dev
    • -c, --channel <CHANNEL>

      The channel to upload the package to

    • -a, --api-key <API_KEY>

      The prefix.dev API key, if none is provided, the token is read from the keychain / auth-file

    "},{"location":"reference/cli/#anaconda","title":"anaconda","text":"

Options for uploading to an Anaconda.org server

    Usage: rattler-build upload anaconda [OPTIONS] --owner <OWNER>

    "},{"location":"reference/cli/#options_8","title":"Options:","text":"
    • -o, --owner <OWNER>

      The owner of the distribution (e.g. conda-forge or your username)

    • -c, --channel <CHANNEL>

      The channel / label to upload the package to (e.g. main / rc)

      • Default value: main
    • -a, --api-key <API_KEY>

      The Anaconda API key, if none is provided, the token is read from the keychain / auth-file

    • -u, --url <URL>

      The URL to the Anaconda server

      • Default value: https://api.anaconda.org
    • -f, --force

      Replace files on conflict

      • Default value: false
      • Possible values: true, false
    "},{"location":"reference/cli/#completion","title":"completion","text":"

    Generate shell completion script

    Usage: rattler-build completion [OPTIONS]

    "},{"location":"reference/cli/#options_9","title":"Options:","text":"
    • -s, --shell <SHELL>

      Shell

      • Possible values: bash, elvish, fish, powershell, zsh
    "},{"location":"reference/cli/#generate-recipe","title":"generate-recipe","text":"

    Generate a recipe from PyPI or CRAN

    Usage: rattler-build generate-recipe <SOURCE> <PACKAGE>

    "},{"location":"reference/cli/#arguments_1","title":"Arguments:","text":"
    • <SOURCE>

      Type of package to generate a recipe for

      • Possible values:
        • pypi: Generate a recipe for a Python package from PyPI
        • cran: Generate a recipe for an R package from CRAN
    • <PACKAGE>

      Name of the package to generate

    "},{"location":"reference/cli/#auth","title":"auth","text":"

    Handle authentication to external channels

    Usage: rattler-build auth <COMMAND>

    "},{"location":"reference/cli/#subcommands_2","title":"Subcommands:","text":"
    • login \u2014 Store authentication information for a given host
    • logout \u2014 Remove authentication information for a given host
    "},{"location":"reference/cli/#login","title":"login","text":"

    Store authentication information for a given host

    Usage: rattler-build auth login [OPTIONS] <HOST>

    "},{"location":"reference/cli/#arguments_2","title":"Arguments:","text":"
    • <HOST>

      The host to authenticate with (e.g. repo.prefix.dev)

    "},{"location":"reference/cli/#options_10","title":"Options:","text":"
    • --token <TOKEN>

      The token to use (for authentication with prefix.dev)

    • --username <USERNAME>

      The username to use (for basic HTTP authentication)

    • --password <PASSWORD>

      The password to use (for basic HTTP authentication)

    • --conda-token <CONDA_TOKEN>

      The token to use on anaconda.org / quetz authentication

    "},{"location":"reference/cli/#logout","title":"logout","text":"

    Remove authentication information for a given host

    Usage: rattler-build auth logout <HOST>

    "},{"location":"reference/cli/#arguments_3","title":"Arguments:","text":"
    • <HOST>

      The host to remove authentication for

    This document was generated automatically by clap-markdown.

    "},{"location":"reference/jinja/","title":"Jinja functions that can be used in the recipe","text":"

    rattler-build comes with a couple of useful helpers that can be used in the recipe.

    "},{"location":"reference/jinja/#functions","title":"Functions","text":""},{"location":"reference/jinja/#the-compiler-function","title":"The compiler function","text":"

    The compiler function can be used to put together a compiler that works for the current platform and the compilation \"target_platform\". The syntax looks like: ${{ compiler('c') }} where 'c' signifies the programming language that is used.

This function evaluates to <compiler>_<target_platform> <compiler_version>. For example, when compiling on linux for the target linux-64, this function evaluates to gcc_linux-64.

The values can be influenced by the variant_configuration: the <lang>_compiler and <lang>_compiler_version keys select the compiler and its version. See below for an example:

    "},{"location":"reference/jinja/#usage-in-a-recipe","title":"Usage in a recipe","text":"recipe.yaml
    requirements:\n  build:\n    - ${{ compiler('c') }}\n

    With a corresponding variant_configuration:

    variant_configuration.yaml
    c_compiler:\n- clang\nc_compiler_version:\n- 9.0\n

    The variables shown above would select the clang compiler in version 9.0. Note that the final output will still contain the target_platform, so that the full compiler will read clang_linux-64 9.0 when compiling with --target-platform linux-64.

    rattler-build defines some default compilers for the following languages (inherited from conda-build):

    • c: gcc on Linux, clang on osx and vs2017 on Windows
    • cxx: gxx on Linux, clangxx on osx and vs2017 on Windows
    • fortran: gfortran on Linux, gfortran on osx and vs2017 on Windows
    • rust: rust
    "},{"location":"reference/jinja/#the-stdlib-function","title":"The stdlib function","text":"

    The stdlib function closely mirrors the compiler function. It can be used to put together a standard library that works for the current platform and the compilation \"target_platform\".

    Usage: ${{ stdlib('c') }}

This results in <stdlib>_<target_platform> <stdlib_version> and uses the variant variables <lang>_stdlib and <lang>_stdlib_version to influence the output.

    "},{"location":"reference/jinja/#usage-in-a-recipe_1","title":"Usage in a recipe:","text":"recipe.yaml
    requirements:\n  build:\n    # these are usually paired!\n    - ${{ compiler('c') }}\n    - ${{ stdlib('c') }}\n

    With a corresponding variant_configuration:

    variant_configuration.yaml
    # these are the values `conda-forge` uses in their pinning file\n# found at https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/main/recipe/conda_build_config.yaml\nc_stdlib:\n- sysroot\nc_stdlib_version:\n- 2.17\n
    "},{"location":"reference/jinja/#the-pin-functions","title":"The pin functions","text":"

    A pin is created based on the version input (from a subpackage or a package resolution).

    The pin functions take the following five arguments:

    • min_pin (default: \"x.x.x.x.x.x\"): The minimum pin to be used. When set to None, no lower bound is set.
    • max_pin (default: \"x\"): The maximum pin to be used. When set to None, no upper bound is set.

    These \"pins\" are applied to the version input to create the lower and upper bounds. For example, if the version is 3.10.5 with min_pin=\"x.x\", max_pin=\"x.x.x\", the lower bound will be 3.10 and the upper bound will be 3.10.6.0a0. The max_pin will increment the last selected segment of the version by 1, and append .0a0 to the end to prevent any alpha versions from being selected.

If the last segment of the version contains a letter (e.g. 9e or 1.1.1j), then incrementing the version will set that letter to a, e.g. 9e will become 10a, and 1.1.1j will become 1.1.2a. In this case, no .0a0 is appended to the end, either.

    Sometimes you want to strongly connect your outputs. This can be achieved with the following input:

    • exact=True (default: False): This will pin the version exactly to the version of the output, incl. the build string.

    To override the lower or upper bound with a hard-coded value, you can use the following input:

    • lower_bound (default: None): This will override the lower bound with the given value.
    • upper_bound (default: None): This will override the upper bound with the given value.

    Both lower_bound and upper_bound expect a valid version string (e.g. 1.2.3).

    "},{"location":"reference/jinja/#the-pin_subpackage-function","title":"The pin_subpackage function","text":"
    • ${{ pin_subpackage(\"mypkg\", min_pin=\"x.x\", max_pin=\"x.x\") }} creates a pin to another output in the recipe. With an input of 3.1.5, this would create a pin of mypkg >=3.1,<3.2.0a0.
    • ${{ pin_subpackage(\"other_output\", exact=True) }} creates a pin to another output in the recipe with an exact version.
    • ${{ pin_subpackage(\"other_output\", lower_bound=\"1.2.3\", upper_bound=\"1.2.4\") }} creates a pin to another output in the recipe with a lower bound of 1.2.3 and an upper bound of 1.2.4. This is equivalent to writing other_output >=1.2.3,<1.2.4.
    "},{"location":"reference/jinja/#the-pin_compatible-function","title":"The pin_compatible function","text":"

The pin_compatible function works exactly like the pin_subpackage function, but it pins the package in the run requirements based on the resolved package of the host or build section.
    "},{"location":"reference/jinja/#the-cdt-function","title":"The cdt function","text":"
    • ${{ cdt(\"mypkg\") }} creates a cross-dependency to another output in the recipe.

    This function helps add Core Dependency Tree packages as dependencies by converting packages as required according to hard-coded logic. See below for an example of how this function can be used:

    # on x86_64 system\ncdt('package-name') # outputs: package-name-cos6-x86_64\n# on aarch64 system\ncdt('package-name') # outputs: package-name-cos6-aarch64\n
    "},{"location":"reference/jinja/#the-hash-variable","title":"The hash variable","text":"
    • ${{ hash }} is the variant hash and is useful in the build string computation.
    "},{"location":"reference/jinja/#the-version_to_buildstring-function","title":"The version_to_buildstring function","text":"
    • ${{ python | version_to_buildstring }} converts a version from the variant to a build string (it removes the . character and takes only the first two elements of the version).
    "},{"location":"reference/jinja/#the-env-object","title":"The env object","text":"

    You can use the env object to retrieve environment variables and forward them to your build script. There are two ways to do this:

    • ${{ env.get(\"MY_ENV_VAR\") }} will return the value of the environment variable MY_ENV_VAR or throw an error if it is not set.
    • ${{ env.get_default(\"MY_ENV_VAR\", \"default_value\") }} will return the value of the environment variable MY_ENV_VAR or \"default_value\" if it is not set.

    You can also check for the existence of an environment variable:

    • ${{ env.exists(\"MY_ENV_VAR\") }} will return true if the environment variable MY_ENV_VAR is set and false otherwise.
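
For example, forwarding a (hypothetical) MY_TOKEN environment variable into the build script could look like this:

build:\n  script:\n    - echo \"MY_TOKEN is ${{ env.get_default('MY_TOKEN', 'none') }}\"\n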
    "},{"location":"reference/jinja/#default-jinja-filters","title":"Default Jinja filters","text":"

The following Jinja filters are available: lower, upper, and indexing into characters (e.g. https://myurl.com/${{ name[0] }}/${{ name | lower }}_${{ version }}.tar.gz).

    Navigate to the Minijinja documentation for a list of all available built-in filters.

    "},{"location":"reference/recipe_file/","title":"The recipe spec","text":"

    rattler-build implements a new recipe spec, different from the traditional \"meta.yaml\" file used in conda-build. A recipe has to be stored as a recipe.yaml file.

    "},{"location":"reference/recipe_file/#history","title":"History","text":"

    A discussion was started on what a new recipe spec could or should look like. The fragments of this discussion can be found here.

The reasons for a new spec are:

    • make it easier to parse (i.e. \"pure YAML\"); conda-build uses a mix of comments and Jinja to achieve a great deal of flexibility, but it's hard to parse the recipe with a computer
    • iron out some inconsistencies around multiple outputs (build vs. build/script and more)
    • remove any need for recursive parsing & solving
    • finally, the initial implementation in boa relied on conda-build; rattler-build removes any dependency on Python or conda-build and reimplements everything in Rust
    "},{"location":"reference/recipe_file/#major-differences-from-conda-build","title":"Major differences from conda-build","text":"
    • recipe filename is recipe.yaml, not meta.yaml
• outputs have less complicated behavior; keys are the same as in the top-level recipe (e.g. build/script, not just script, and package/name, not just name)
    • no implicit meta-packages in outputs
• no full Jinja2 support: no conditionals or {% set ... %} support, only string interpolation; variables can be set in the top-level \"context\" section, which is valid YAML
    • Jinja string interpolation needs to be preceded by a dollar sign at the beginning of a string, e.g. - ${{ version }} in order for it to be valid YAML
    • selectors use a YAML dictionary style (vs. comments in conda-build). Instead of - somepkg #[osx] we use:
      if: osx\nthen:\n  - somepkg\n
    • skip instruction uses a list of skip conditions and not the selector syntax from conda-build (e.g. skip: [\"osx\", \"win and py37\"])
    "},{"location":"reference/recipe_file/#spec","title":"Spec","text":"

    The recipe spec has the following parts:

    • context: to set up variables that can later be used in Jinja string interpolation
    • package: defines name, version etc. of the top-level package
    • source: points to the sources that need to be downloaded in order to build the recipe
    • build: defines how to build the recipe and what build number to use
    • requirements: defines requirements of the top-level package
    • test: defines tests for the top-level package
    • outputs: a recipe can have multiple outputs. Each output can and should have a package, requirements and test section
    "},{"location":"reference/recipe_file/#spec-reference","title":"Spec reference","text":"

    The spec is also made available through a JSON Schema (which is used for validation). The schema (and pydantic source file) can be found in this repository: recipe-format

    To use with VSCode(yaml-plugin) and other IDEs:

    Either start the document with the following line:

    # yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\n
    Or, using yaml.schemas,
    yaml.schemas: {\n  \"https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json\": \"**/recipe.yaml\",\n}\n
    Read more about this here.

    See more in the automatic linting chapter.

    "},{"location":"reference/recipe_file/#examples","title":"Examples","text":"recipe.yaml
    # this sets up \"context variables\" (in this case name and version) that\n# can later be used in Jinja expressions\ncontext:\n  version: 1.1.0\n  name: imagesize\n\n# top level package information (name and version)\npackage:\n  name: ${{ name }}\n  version: ${{ version }}\n\n# location to get the source from\nsource:\n  url: https://pypi.io/packages/source/${{ name[0] }}/${{ name }}/${{ name }}-${{ version }}.tar.gz\n  sha256: f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5\n\n# build number (should be incremented if a new build is made, but version is not incrementing)\nbuild:\n  number: 1\n  script: python -m pip install --no-deps --ignore-installed .\n\n# the requirements at build and runtime\nrequirements:\n  host:\n    - python\n    - pip\n  run:\n    - python\n\n# tests to validate that the package works as expected\ntests:\n  - python:\n      imports:\n        - imagesize\n\n# information about the package\nabout:\n  homepage: https://github.com/shibukawa/imagesize_py\n  license: MIT\n  summary: 'Getting image size from png/jpeg/jpeg2000/gif file'\n  description: |\n    This module analyzes jpeg/jpeg2000/png/gif image header and\n    return image size.\n  repository: https://github.com/shibukawa/imagesize_py\n  documentation: https://pypi.python.org/pypi/imagesize\n\n# the below is conda-forge specific!\nextra:\n  recipe-maintainers:\n    - somemaintainer\n
    "},{"location":"reference/recipe_file/#package-section","title":"Package section","text":"

    Specifies package information.

    package:\n  name: bsdiff4\n  version: \"2.1.4\"\n
    • name: The lower case name of the package. It may contain \"-\", but no spaces.
    • version: The version number of the package. Use the PEP-386 verlib conventions. Cannot contain \"-\". YAML interprets version numbers such as 1.0 as floats, meaning that 0.10 will be the same as 0.1. To avoid this, put the version number in quotes so that it is interpreted as a string.
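For example, here is a minimal sketch (the package name is hypothetical) contrasting the two forms:

package:\n  name: mypkg\n  # version: 0.10   # WRONG: YAML parses this as the float 0.1\n  version: \"0.10\"  # correct: quoted, so it stays the string \"0.10\"\n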
    "},{"location":"reference/recipe_file/#source-section","title":"Source section","text":"

    Specifies where the source code of the package is coming from. The source may come from a tarball file, git, hg, or svn. It may be a local path and it may contain patches.

    "},{"location":"reference/recipe_file/#source-from-tarball-or-zip-archive","title":"Source from tarball or zip archive","text":"
    source:\n  url: https://pypi.python.org/packages/source/b/bsdiff4/bsdiff4-1.1.4.tar.gz\n  md5: 29f6089290505fc1a852e176bd276c43\n  sha1: f0a2c9a30073449cfb7d171c57552f3109d93894\n  sha256: 5a022ff4c1d1de87232b1c70bde50afbb98212fd246be4a867d8737173cf1f8f\n

    If an extracted archive contains only 1 folder at its top level, its contents will be moved 1 level up, so that the extracted package contents sit in the root of the work folder.

    "},{"location":"reference/recipe_file/#source-from-git","title":"Source from git","text":"
    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  # branch: master # note: defaults to fetching the repo's default branch\n

    You can use rev to pin the commit version directly:

    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  rev: \"50a1f7ed6c168eb0815d424cba2df62790f168f0\"\n

    Or you can use the tag:

    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  tag: \"1.1.4\"\n

    git can also be a relative path to the recipe directory:

    source:\n  git: ../../bsdiff4/.git\n  tag: \"1.1.4\"\n

Furthermore, if you want to fetch just the current \"HEAD\" (this may result in non-deterministic builds), you can use depth.

    source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  depth: 1 # note: the behaviour defaults to -1\n

Note: a tag or rev may not be reachable within the configured clone depth. For this reason, combining rev or tag with depth is not allowed unless depth is set to -1.

source:\n  git: https://github.com/ilanschnell/bsdiff4.git\n  tag: \"1.1.4\"\n  depth: 1 # error: use of `depth` with `tag` is invalid, they are mutually exclusive\n

    When you want to use git-lfs, you need to set lfs: true. This will also pull the lfs files from the repository.

    source:\n  git: ../../bsdiff4/.git\n  tag: \"1.1.4\"\n  lfs: true # note: defaults to false\n
    "},{"location":"reference/recipe_file/#source-from-a-local-path","title":"Source from a local path","text":"

    If the path is relative, it is taken relative to the recipe directory. The source is copied to the work directory before building.

      source:\n    path: ../src\n    use_gitignore: false # note: defaults to true\n

    By default, all files in the local path that are ignored by git are also ignored by rattler-build. You can disable this behavior by setting use_gitignore to false.

    "},{"location":"reference/recipe_file/#patches","title":"Patches","text":"

    Patches may optionally be applied to the source.

      source:\n    #[source information here]\n    patches:\n      - my.patch # the patch file is expected to be found in the recipe\n
    "},{"location":"reference/recipe_file/#destination-path","title":"Destination path","text":"

    Within rattler-build's work directory, you may specify a particular folder to place the source into. rattler-build will always drop you into the same folder ([build folder]/work), but it's up to you whether you want your source extracted into that folder, or nested deeper. This feature is particularly useful when dealing with multiple sources, but can apply to recipes with single sources as well.

    source:\n  #[source information here]\n  target_directory: my-destination/folder\n
    "},{"location":"reference/recipe_file/#source-from-multiple-sources","title":"Source from multiple sources","text":"

    Some software is most easily built by aggregating several pieces.

    The syntax is a list of source dictionaries. Each member of this list follows the same rules as the single source. All features for each member are supported.

    Example:

    source:\n  - url: https://package1.com/a.tar.bz2\n    target_directory: stuff\n  - url: https://package1.com/b.tar.bz2\n    target_directory: stuff\n  - git: https://github.com/mamba-org/boa\n    target_directory: boa\n

    Here, the two URL tarballs will go into one folder, and the git repo is checked out into its own space. git will not clone into a non-empty folder.

    "},{"location":"reference/recipe_file/#build-section","title":"Build section","text":"

    Specifies build information.

    Each field that expects a path can also handle a glob pattern. The matching is performed from the top of the build environment, so to match files inside your project you can use a pattern similar to the following one: \"**/myproject/**/*.txt\". This pattern will match any .txt file found in your project. Quotation marks (\"\") are required for patterns that start with a *.

    Recursive globbing using ** is also supported.

    "},{"location":"reference/recipe_file/#build-number-and-string","title":"Build number and string","text":"

The build number should be incremented for new builds of the same version; it defaults to 0. The build string cannot contain \"-\"; if not set explicitly, it defaults to the default rattler-build build string plus the build number.

    build:\n  number: 1\n  string: abc\n
    "},{"location":"reference/recipe_file/#dynamic-linking","title":"Dynamic linking","text":"

    This section contains settings for the shared libraries and executables.

    build:\n  dynamic_linking:\n    rpath_allowlist: [\"/usr/lib/**\"]\n
    "},{"location":"reference/recipe_file/#python-entry-points","title":"Python entry points","text":"

    The following example creates a Python entry point named \"bsdiff4\" that calls bsdiff4.cli.main_bsdiff4().

    build:\n  python:\n    entry_points:\n      - bsdiff4 = bsdiff4.cli:main_bsdiff4\n      - bspatch4 = bsdiff4.cli:main_bspatch4\n
    "},{"location":"reference/recipe_file/#script","title":"Script","text":"

    By default, rattler-build uses a build.sh file on Unix (macOS and Linux) and a build.bat file on Windows, if they exist in the same folder as the recipe.yaml file. With the script parameter you can either supply a different filename or write out short build scripts. You may need to use selectors to use different scripts for different platforms.

    build:\n  # A very simple build script\n  script: pip install .\n\n  # The build script can also be a list\n  script:\n    - pip install .\n    - echo \"hello world\"\n    - if: unix\n      then:\n        - echo \"unix\"\n
    "},{"location":"reference/recipe_file/#skipping-builds","title":"Skipping builds","text":"

    Lists conditions under which rattler-build should skip the build of this recipe. Particularly useful for defining recipes that are platform-specific. By default, a build is never skipped.

    build:\n  skip:\n    - win\n    ...\n
    "},{"location":"reference/recipe_file/#architecture-independent-packages","title":"Architecture-independent packages","text":"

    Allows you to specify \"no architecture\" when building a package, thus making it compatible with all platforms and architectures. Architecture-independent packages can be installed on any platform.

Assigning the noarch key the value generic tells conda not to attempt any manipulation of the package contents.

    build:\n  noarch: generic\n

    noarch: generic is most useful for packages such as static JavaScript assets and source archives. For pure Python packages that can run on any Python version, you can use the noarch: python value instead:

    build:\n  noarch: python\n

    Note

At the time of writing, noarch packages should not make use of preprocessing selectors: a noarch package is built with the directives that evaluate to true on the platform it is built on, which will likely result in an incorrect or incomplete installation on other platforms.

    "},{"location":"reference/recipe_file/#include-build-recipe","title":"Include build recipe","text":"

    The recipe and rendered recipe.yaml file are included in the package_metadata by default. You can disable this by passing --no-include-recipe on the command line.

    Note

    There are many more options in the build section. These additional options control how variants are computed, prefix replacements, and more. See the full build options for more information.

    "},{"location":"reference/recipe_file/#requirements-section","title":"Requirements section","text":"

    Specifies the build and runtime requirements. Dependencies of these requirements are included automatically.

    Versions for requirements must follow the conda/mamba match specification. See build-version-spec.
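A few illustrative match specs (the package names and versions here are examples only):

requirements:\n  run:\n    - python              # any version\n    - numpy >=1.21,<2     # lower and upper bound\n    - six ==1.16.0        # exact version\n    - pytest 7.4.*        # any release in the 7.4 series\n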

    "},{"location":"reference/recipe_file/#build","title":"Build","text":"

    Tools required to build the package.

These packages run on the build system and include version control systems (git, svn), make tools (GNU Make, Autotools, CMake), compilers (real cross, pseudo-cross, or native when not cross-compiling), and any source pre-processors.

    Packages which provide \"sysroot\" files, like the CDT packages (see below), also belong in the build section.

    requirements:\n  build:\n    - git\n    - cmake\n
    "},{"location":"reference/recipe_file/#host","title":"Host","text":"

    Represents packages that need to be specific to the target platform when the target platform is not necessarily the same as the native build platform. For example, in order for a recipe to be \"cross-capable\", shared libraries requirements must be listed in the host section, rather than the build section, so that the shared libraries that get linked are ones for the target platform, rather than the native build platform. You should also include the base interpreter for packages that need one. In other words, a Python package would list python here and an R package would list mro-base or r-base.

    requirements:\n  build:\n    - ${{ compiler('c') }}\n    - if: linux\n      then:\n        - ${{ cdt('xorg-x11-proto-devel') }}\n  host:\n    - python\n

    Note

    When both \"build\" and \"host\" sections are defined, the build section can be thought of as \"build tools\" - things that run on the native platform, but output results for the target platform (e.g. a cross-compiler that runs on linux-64, but targets linux-armv7).

    The PREFIX environment variable points to the host prefix. With respect to activation during builds, both the host and build environments are activated. The build prefix is activated before the host prefix so that the host prefix has priority over the build prefix. Executables that don't exist in the host prefix should be found in the build prefix.
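As a small sketch, a build script can refer to both prefixes through the PREFIX and BUILD_PREFIX environment variables (BUILD_PREFIX is used the same way in the Rust tutorial later in this documentation):

# host prefix: where host dependencies live and where the package is staged\necho $PREFIX\n# build prefix: where build tools such as compilers and cmake are installed\necho $BUILD_PREFIX\n# explicitly invoke a tool from the build prefix\n$BUILD_PREFIX/bin/cmake --version\n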

The build and host prefixes are always separate when both are defined, or when ${{ compiler() }} Jinja2 functions are used. The only time build and host are merged is when the host section is absent and no ${{ compiler() }} Jinja2 functions are used in the recipe.

    "},{"location":"reference/recipe_file/#run","title":"Run","text":"

    Packages required to run the package.

    These are the dependencies that are installed automatically whenever the package is installed. Package names should follow the package match specifications.

    requirements:\n  run:\n    - python\n    - six >=1.8.0\n

    To build a recipe against different versions of NumPy and ensure that each version is part of the package dependencies, list numpy as a requirement in recipe.yaml and use a conda_build_config.yaml file with multiple NumPy versions.
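A minimal sketch of such a variant file (the versions are chosen purely for illustration):

# conda_build_config.yaml\nnumpy:\n  - \"1.24\"\n  - \"1.26\"\n

Each listed version then produces its own build of the recipe.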

    "},{"location":"reference/recipe_file/#run-constraints","title":"Run constraints","text":"

    Packages that are optional at runtime but must obey the supplied additional constraint if they are installed.

    Package names should follow the package match specifications.

    requirements:\n  run_constraints:\n    - optional-subpackage ==${{ version }}\n

    For example, let's say we have an environment that has package \"a\" installed at version 1.0. If we install package \"b\" that has a run_constraints entry of \"a >1.0\", then mamba would need to upgrade \"a\" in the environment in order to install \"b\".

    This is especially useful in the context of virtual packages, where the run_constraints dependency is not a package that mamba manages, but rather a virtual package that represents a system property that mamba can't change. For example, a package on Linux may impose a run_constraints dependency on __glibc >=2.12. This is the version bound consistent with CentOS 6. Software built against glibc 2.12 will be compatible with CentOS 6. This run_constraints dependency helps mamba, conda or pixi tell the user that a given package can't be installed if their system glibc version is too old.
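Expressed in a recipe, the glibc example above would look like this:

requirements:\n  run_constraints:\n    - __glibc >=2.12\n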

    "},{"location":"reference/recipe_file/#run-exports","title":"Run exports","text":"

Packages may have runtime requirements such as shared libraries (e.g. zlib), which are required for linking at build time and for resolving the link at run time. Such packages use run_exports to define their runtime requirements, so that packages depending on them automatically pick up the correct runtime constraints.

    Example from zlib:

      requirements:\n    run_exports:\n      - ${{ pin_subpackage('libzlib', exact=True) }}\n

Run exports are weak by default, but you can also define strong run_exports.

      requirements:\n    run_exports:\n      strong:\n        - ${{ pin_subpackage('libzlib', exact=True) }}\n
    "},{"location":"reference/recipe_file/#ignore-run-exports","title":"Ignore run exports","text":"

There may be cases where an upstream package has a problematic run_exports constraint. You can ignore it in your recipe by listing the upstream package name in the ignore_run_exports section of requirements.

    You can ignore them by package name, or by naming the runtime dependency directly.

      requirements:\n    ignore_run_exports:\n      from_package:\n        - zlib\n

Using a runtime dependency name:

      requirements:\n    ignore_run_exports:\n      from_name:\n        - libzlib\n

    Note

    ignore_run_exports only applies to runtime dependencies coming from an upstream package.

    "},{"location":"reference/recipe_file/#tests-section","title":"Tests section","text":"

    rattler-build supports four different types of tests. The \"script test\" installs the package and runs a list of commands. The \"Python test\" attempts to import a list of Python modules and runs pip check. The \"downstream test\" runs the tests of a downstream package that reverse depends on the package being built. And lastly, the \"package content test\" checks if the built package contains the mentioned items.

    The tests section is a list of these items:

    tests:\n  - script:\n      - echo \"hello world\"\n    requirements:\n      run:\n        - pytest\n    files:\n      source:\n        - test-data.txt\n\n  - python:\n      imports:\n        - bsdiff4\n      pip_check: true  # this is the default\n  - downstream: numpy\n
    "},{"location":"reference/recipe_file/#script-test","title":"Script test","text":"

    The script test has 3 top-level keys: script, files and requirements. Only the script key is required.

    "},{"location":"reference/recipe_file/#test-commands","title":"Test commands","text":"

    Commands that are run as part of the test.

    tests:\n  - script:\n      - echo \"hello world\"\n      - bsdiff4 -h\n      - bspatch4 -h\n
    "},{"location":"reference/recipe_file/#extra-test-files","title":"Extra test files","text":"

    Test files that are copied from the source work directory into the temporary test directory and are needed during testing (note that the source work directory is otherwise not available at all during testing).

    You can also include files that come from the recipe folder. They are copied into the test directory as well.

    At test execution time, the test directory is the current working directory.

    tests:\n  - script:\n      - ls\n    files:\n      source:\n        - myfile.txt\n        - tests/\n        - some/directory/pattern*.sh\n      recipe:\n        - extra-file.txt\n
    "},{"location":"reference/recipe_file/#test-requirements","title":"Test requirements","text":"

    In addition to the runtime requirements, you can specify requirements needed during testing. The runtime requirements that you specified in the \"run\" section described above are automatically included during testing (because the built package is installed as it regularly would be).

    In the build section you can specify additional requirements that are only needed on the build system for cross-compilation (e.g. emulators or compilers).

    tests:\n  - script:\n      - echo \"hello world\"\n    requirements:\n      build:\n        - myemulator\n      run:\n        - nose\n
    "},{"location":"reference/recipe_file/#python-tests","title":"Python tests","text":"

    For this test type you can list a set of Python modules that need to be importable. The test will fail if any of the modules cannot be imported.

    The test will also automatically run pip check to check for any broken dependencies. This can be disabled by setting pip_check: false in the YAML.

    tests:\n  - python:\n      imports:\n        - bsdiff4\n        - bspatch4\n      pip_check: true  # can be left out because this is the default\n

    Internally this will write a small Python script that imports the modules:

    import bsdiff4\nimport bspatch4\n
    "},{"location":"reference/recipe_file/#check-for-package-contents","title":"Check for package contents","text":"

    Checks if the built package contains the mentioned items. These checks are executed directly at the end of the build process to make sure that all expected files are present in the package.

    tests:\n  - package_contents:\n      # checks for the existence of files inside $PREFIX or %PREFIX%\n      # or, checks that there is at least one file matching the specified `glob`\n      # pattern inside the prefix\n      files:\n        - etc/libmamba/test.txt\n        - etc/libmamba\n        - etc/libmamba/*.mamba.txt\n\n      # checks for the existence of `mamba/api/__init__.py` inside of the\n      # Python site-packages directory (note: also see Python import checks)\n      site_packages:\n        - mamba.api\n\n\n      # looks in $PREFIX/bin/mamba for unix and %PREFIX%\\Library\\bin\\mamba.exe on Windows\n      # note: also check the `commands` and execute something like `mamba --help` to make\n      # sure things work fine\n      bin:\n        - mamba\n\n      # searches for `$PREFIX/lib/libmamba.so` or `$PREFIX/lib/libmamba.dylib` on Linux or macOS,\n      # on Windows for %PREFIX%\\Library\\lib\\mamba.dll & %PREFIX%\\Library\\bin\\mamba.bin\n      lib:\n        - mamba\n\n      # searches for `$PREFIX/include/libmamba/mamba.hpp` on unix, and\n      # on Windows for `%PREFIX%\\Library\\include\\libmamba\\mamba.hpp`\n      include:\n        - libmamba/mamba.hpp\n
    "},{"location":"reference/recipe_file/#downstream-tests","title":"Downstream tests","text":"

    Warning

    Downstream tests are not yet implemented in rattler-build.

    A downstream test can mention a single package that has a dependency on the package being built. The test will install the package and run the tests of the downstream package with our current package as a dependency.

    Sometimes downstream packages do not resolve. In this case, the test is ignored.

    tests:\n  - downstream: numpy\n
    "},{"location":"reference/recipe_file/#outputs-section","title":"Outputs section","text":"

    Explicitly specifies packaging steps. This section supports multiple outputs, as well as different package output types. The format is a list of mappings.

    When using multiple outputs, certain top-level keys are \"forbidden\": package and requirements. Instead of package, a top-level recipe key can be defined. The recipe.name is ignored but the recipe.version key is used as default version for each output. Other \"top-level\" keys are merged into each output (e.g. the about section) to avoid repetition. Each output is a complete recipe, and can have its own build, requirements, and test sections.

    recipe:\n  # the recipe name is ignored\n  name: some\n  version: 1.0\n\noutputs:\n  - package:\n      # version is taken from recipe.version (1.0)\n      name: some-subpackage\n\n  - package:\n      name: some-other-subpackage\n      version: 2.0\n

Each output acts like an independent recipe and can have its own script, build_number, and so on.

    outputs:\n  - package:\n      name: subpackage-name\n    build:\n      script: install-subpackage.sh\n

Each output is built independently. Take care not to package the same files twice.

    "},{"location":"reference/recipe_file/#subpackage-requirements","title":"Subpackage requirements","text":"

    Like a top-level recipe, a subpackage may have zero or more dependencies listed as build, host or run requirements.

    The dependencies listed as subpackage build requirements are available only during the packaging phase of that subpackage.

    outputs:\n  - package:\n      name: subpackage-name\n    requirements:\n      build:\n        - some-dep\n      run:\n        - some-dep\n

    You can also use the pin_subpackage function to pin another output from the same recipe.

    outputs:\n  - package:\n      name: libtest\n  - package:\n      name: test\n    requirements:\n      build:\n        - ${{ pin_subpackage('libtest', max_pin='x.x') }}\n

The outputs are topologically sorted by the dependency graph, taking the pin_subpackage invocations into account. When using pin_subpackage(name, exact=True), a special behavior is used: the name package is injected as a \"variant\", and the variant matrix is expanded appropriately. For example, consider the following situation with a variant_config.yaml file that contains openssl: [1, 3]:

    outputs:\n  - package:\n      name: libtest\n    requirements:\n      host:\n        - openssl\n  - package:\n      name: test\n    requirements:\n      build:\n        - ${{ pin_subpackage('libtest', exact=True) }}\n

    Due to the variant config file, this will build two versions of libtest. We will also build two versions of test, one that depends on libtest (openssl 1) and one that depends on libtest (openssl 3).

    "},{"location":"reference/recipe_file/#about-section","title":"About section","text":"

Specifies identifying information about the package. This information is displayed by the package server.

    about:\n  homepage: https://example.com/bsdiff4\n  license: BSD-3-Clause # (1)!\n  license_file: LICENSE\n  summary: binary diff and patch using the BSDIFF4-format\n  description: |\n    Long description of bsdiff4 ...\n  repository: https://github.com/ilanschnell/bsdiff4\n  documentation: https://docs.com\n
1. Only SPDX license identifiers are allowed; more info here: SPDX. If you want another license type, LicenseRef-<YOUR-LICENSE> can be used, e.g. license: LicenseRef-Proprietary
    "},{"location":"reference/recipe_file/#license-file","title":"License file","text":"

    Adds a file containing the software license to the package metadata. Many licenses require the license statement to be distributed with the package. The filename is relative to the source or recipe directory. The value can be a single filename or a YAML list for multiple license files. Values can also point to directories with license information. Directory entries must end with a / suffix (this is to lessen unintentional inclusion of non-license files; all the directory's contents will be unconditionally and recursively added).

    about:\n  license_file:\n    - LICENSE\n    - vendor-licenses/\n
    "},{"location":"reference/recipe_file/#extra-section","title":"Extra section","text":"

    A schema-free area for storing non-conda-specific metadata in standard YAML form.

    Example: To store recipe maintainers information
extra:\n  maintainers:\n    - name of maintainer\n
    "},{"location":"reference/recipe_file/#templating-with-jinja","title":"Templating with Jinja","text":"

    rattler-build supports limited Jinja templating in the recipe.yaml file.

    You can set up Jinja variables in the context section:

    context:\n  name: \"test\"\n  version: \"5.1.2\"\n  # later keys can reference previous keys\n  # and use jinja functions to compute new values\n  major_version: ${{ version.split('.')[0] }}\n

    Later in your recipe.yaml you can use these values in string interpolation with Jinja:

    source:\n  url: https://github.com/mamba-org/${{ name }}/v${{ version }}.tar.gz\n

    Jinja has built-in support for some common string manipulations.
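For example, a sketch assuming the usual Jinja filters and Python string methods are available (see the Jinja reference for the exact list):

context:\n  name: \"MyPackage\"\n  version: \"1.2.3\"\n  lower_name: ${{ name | lower }}             # \"mypackage\"\n  major_version: ${{ version.split('.')[0] }} # \"1\"\n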

In rattler-build, complex Jinja is completely disallowed, as we try to produce YAML that is valid at all times. You should therefore not use {% if ... %} or similar Jinja constructs that produce invalid YAML. Furthermore, instead of plain double curly brackets, Jinja statements need to be prefixed with $, e.g. ${{ ... }}:

    package:\n  name: {{ name }}   # WRONG: invalid yaml\n  name: ${{ name }} # correct\n

    For more information, see the Jinja template documentation and the list of available environment variables env-vars.

    Jinja templates are evaluated during the build process.

    "},{"location":"reference/recipe_file/#additional-jinja2-functionality-in-rattler-build","title":"Additional Jinja2 functionality in rattler-build","text":"

    Besides the default Jinja2 functionality, additional Jinja functions are available during the rattler-build process: pin_compatible, pin_subpackage, and compiler.

The compiler function takes c, cxx, fortran, and other values as arguments and automatically selects the right (cross-)compiler for the target platform.

    build:\n  - ${{ compiler('c') }}\n

    The pin_subpackage function pins another package produced by the recipe with the supplied parameters.

    Similarly, the pin_compatible function will pin a package according to the specified rules.

    "},{"location":"reference/recipe_file/#pin-expressions","title":"Pin expressions","text":"

rattler-build supports pin expressions. A pin expression can have a min_pin, max_pin, and exact value. The max_pin and min_pin are specified with a string containing only x and . characters, e.g. max_pin=\"x.x.x\" would pin the given package to <1.2.3 (if the package version is 1.2.2, for example).

    A pin with min_pin=\"x.x\",max_pin=\"x.x\" for a package of version 1.2.2 would evaluate to >=1.2.2,<1.2.3.

    If exact=true, then the hash is included, and the package is pinned exactly, e.g. ==1.2.2 h1234. This is a unique package variant that cannot exist more than once, and thus is \"exactly\" pinned.
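Putting these rules together for a hypothetical package mypkg resolved at version 1.2.2, a sketch of how the expressions evaluate:

# evaluates to: mypkg <1.2.3\n- ${{ pin_subpackage('mypkg', max_pin='x.x.x') }}\n# evaluates to: mypkg >=1.2.2,<1.2.3\n- ${{ pin_subpackage('mypkg', min_pin='x.x', max_pin='x.x') }}\n# evaluates to an exact pin including the hash, e.g. mypkg ==1.2.2 h1234\n- ${{ pin_subpackage('mypkg', exact=True) }}\n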

    "},{"location":"reference/recipe_file/#pin-subpackage","title":"Pin subpackage","text":"

    Pin subpackage refers to another package from the same recipe file. It is commonly used in the build/run_exports section to export a run export from the package, or with multiple outputs to refer to a previous build.

    It looks something like:

    package:\n  name: mypkg\n  version: \"1.2.3\"\n\nrequirements:\n  run_exports:\n    # this will evaluate to `mypkg <1.3`\n    - ${{ pin_subpackage(name, max_pin='x.x') }}\n
    "},{"location":"reference/recipe_file/#pin-compatible","title":"Pin compatible","text":"

    Pin compatible lets you pin a package based on the version retrieved from the variant file (if the pinning from the variant file needs customization).

    For example, if the variant specifies a pin for numpy: 1.11, one can use pin_compatible to relax it:

requirements:\n  host:\n    # this will select numpy 1.11\n    - numpy\n  run:\n    # this will export `numpy >=1.11,<2`, instead of the stricter `1.11` pin\n    - ${{ pin_compatible('numpy', min_pin='x.x', max_pin='x') }}\n
    "},{"location":"reference/recipe_file/#the-env-jinja-functions","title":"The env Jinja functions","text":"

    You can access the current environment variables using the env object in Jinja.

    There are three functions:

    • env.get(\"ENV_VAR\") will insert the value of \"ENV_VAR\" into the recipe.
    • env.get_default(\"ENV_VAR\", \"undefined\") will insert the value of \"ENV_VAR\" into the recipe or, if \"ENV_VAR\" is not defined, the specified default value (in this case \"undefined\")
• env.exists(\"ENV_VAR\") returns true or false depending on whether \"ENV_VAR\" is set to any value

    This can be used for some light templating, for example:

    build:\n  string: ${{ env.get(\"GIT_BUILD_STRING\") }}_${{ PKG_HASH }}\n
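The other two functions work the same way; a small sketch (the environment variable names are hypothetical):

context:\n  # falls back to \"0\" when BUILD_NUMBER is not set\n  build_number: ${{ env.get_default(\"BUILD_NUMBER\", \"0\") }}\n  # true if CI is set to any value\n  on_ci: ${{ env.exists(\"CI\") }}\n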
    "},{"location":"reference/recipe_file/#cmp-function","title":"cmp function","text":"

    This function matches the first argument (the package's MatchSpec) against the second argument (the version spec) and returns the resulting boolean.

    cmp(python, '>=3.4')\n

    Example: cmp usage example

    "},{"location":"reference/recipe_file/#cdt-function","title":"cdt function","text":"

    This function helps add Core Dependency Tree packages as dependencies by converting packages as required according to hard-coded logic.

    # on x86_64 system\ncdt('package-name') # outputs: package-name-cos6-x86_64\n# on aarch64 system\ncdt('package-name') # outputs: package-name-cos6-aarch64\n

    Example: cdt usage example

    "},{"location":"reference/recipe_file/#preprocessing-selectors","title":"Preprocessing selectors","text":"

    You can add selectors to any item, and the selector is evaluated in a preprocessing stage. If a selector evaluates to true, the item is flattened into the parent element. If a selector evaluates to false, the item is removed.

    Selectors can use if ... then ... else as follows:

    source:\n  - if: not win\n    then:\n      - url: http://path/to/unix/source\n    else:\n      - url: http://path/to/windows/source\n\n# or the equivalent with two if conditions:\n\nsource:\n  - if: unix\n    then:\n      - url: http://path/to/unix/source\n  - if: win\n    then:\n      - url: http://path/to/windows/source\n

A selector is a valid Python expression that is evaluated. You can read more about selectors in the \"Selectors in recipes\" chapter.

    The use of the Python version selectors, py27, py34, etc. is discouraged in favor of the more general comparison operators. Additional selectors in this series will not be added to conda-build.
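For example, instead of a py38-style selector you could express the comparison with the cmp function described earlier (a sketch, assuming cmp can be used inside selector expressions; the dependency name is hypothetical):

requirements:\n  run:\n    - if: cmp(python, \">=3.8\")\n      then:\n        - some-py38-only-dep\n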

    Because the selector is any valid Python expression, complicated logic is possible:

    - if: unix and not win\n  then: ...\n- if: (win or linux) and not py27\n  then: ...\n

    Lists are automatically \"merged\" upwards, so it is possible to group multiple items under a single selector:

    tests:\n  - script:\n    - if: unix\n      then:\n      - test -d ${PREFIX}/include/xtensor\n      - test -f ${PREFIX}/lib/cmake/xtensor/xtensorConfigVersion.cmake\n    - if: win\n      then:\n      - if not exist %LIBRARY_PREFIX%\\include\\xtensor\\xarray.hpp (exit 1)\n      - if not exist %LIBRARY_PREFIX%\\lib\\cmake\\xtensor\\xtensorConfigVersion.cmake (exit 1)\n\n# On unix this is rendered to:\ntests:\n  - script:\n    - test -d ${PREFIX}/include/xtensor\n    - test -f ${PREFIX}/lib/cmake/xtensor/xtensorConfigVersion.cmake\n
    "},{"location":"reference/recipe_file/#experimental-features","title":"Experimental features","text":"

    Warning

    These are experimental features of rattler-build and may change or go away completely.

    "},{"location":"reference/recipe_file/#jinja-functions","title":"Jinja functions","text":"
    • load_from_file
    • git.* functions
    "},{"location":"tutorials/cpp/","title":"Packaging a C++ package","text":"

This tutorial will guide you through making a C++ package with rattler-build.

    "},{"location":"tutorials/cpp/#building-a-header-only-library","title":"Building a Header-only Library","text":"

    To build a package for the header-only library xtensor, you need to manage dependencies and ensure proper installation paths.

    "},{"location":"tutorials/cpp/#key-steps","title":"Key Steps","text":"
    1. Dependencies: Ensure cmake, ninja, and a compiler are available as dependencies.

    2. CMake Installation Prefix: Use the CMAKE_INSTALL_PREFIX setting to instruct CMake to install the headers in the correct location.

    3. Unix Systems: Follow the standard Unix prefix:

      $PREFIX/include\n$PREFIX/lib\n

    4. Windows Systems: Use a Unix-like prefix but nested in a Library directory:

      $PREFIX/Library/include\n$PREFIX/Library/lib\n
      Utilize the handy variables %LIBRARY_PREFIX% and %LIBRARY_BIN% to guide CMake to install the headers and libraries correctly.

    This approach ensures that the headers and libraries are installed in the correct directories on both Unix and Windows systems.

    "},{"location":"tutorials/cpp/#recipe","title":"Recipe","text":"recipe.yaml
    context:\n  version: \"0.24.6\"\n\npackage:\n  name: xtensor\n  version: ${{ version }}\n\nsource:\n  url: https://github.com/xtensor-stack/xtensor/archive/${{ version }}.tar.gz\n  sha256: f87259b51aabafdd1183947747edfff4cff75d55375334f2e81cee6dc68ef655\n\nbuild:\n  number: 0\n  script:\n    - if: win # (1)!\n      then: |\n        cmake -GNinja \\\n            -D BUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% \\\n            %SRC_DIR%\n        ninja install\n      else: |\n        cmake ${CMAKE_ARGS} -GNinja -DBUILD_TESTS=OFF \\\n              -DCMAKE_INSTALL_PREFIX=$PREFIX \\\n              $SRC_DIR\n        ninja install\n\nrequirements:\n  build:\n    - ${{ compiler('cxx') }} # (2)!\n    - cmake\n    - ninja\n  host:\n    - xtl >=0.7,<0.8\n  run:\n    - xtl >=0.7,<0.8\n  run_constraints: # (3)!\n    - xsimd >=8.0.3,<10 \n\ntests:\n  - package_contents:\n      include: # (4)!\n        - xtensor/xarray.hpp\n      files: # (5)!\n        - share/cmake/xtensor/xtensorConfig.cmake\n        - share/cmake/xtensor/xtensorConfigVersion.cmake\n\nabout:\n  homepage: https://github.com/xtensor-stack/xtensor\n  license: BSD-3-Clause\n  license_file: LICENSE\n  summary: The C++ tensor algebra library\n  description: Multi dimensional arrays with broadcasting and lazy computing\n  documentation: https://xtensor.readthedocs.io\n  repository: https://github.com/xtensor-stack/xtensor\n\nextra:\n  recipe-maintainers:\n    - some-maintainer\n
1. The if: condition allows the user to switch the behavior of the build based on checks such as the operating system.
    2. The compiler function is used to get the C++ compiler for the build system.
3. The run_constraints section specifies the version range of a package that this package can run \"with\", but does not itself depend on.
4. The include section specifies the header file to be tested for existence.
    5. The files section specifies the files to be tested for existence, using a glob pattern.
    "},{"location":"tutorials/cpp/#building-a-c-application","title":"Building A C++ application","text":"

    In this example, we'll build poppler, a C++ application for manipulating PDF files from the command line. The final package will install several tools into the bin/ folder. We'll use external build scripts and run actual scripts in the test.

    "},{"location":"tutorials/cpp/#key-steps_1","title":"Key Steps","text":"
    1. Dependencies:

      • Build Dependencies: These are necessary for the building process, including cmake, ninja, and pkg-config.
      • Host Dependencies: These are the libraries poppler links against, such as cairo, fontconfig, freetype, glib, and others.
    2. Compiler Setup: We use the compiler function to obtain the appropriate C and C++ compilers.

    3. Build Script: The build.script field points to an external script (poppler-build.sh) which contains the build commands.

4. Testing: Simple tests are included to verify that the installed tools (pdfinfo, pdfunite, pdftocairo) are working correctly by running them and expecting an exit code of 0.

    "},{"location":"tutorials/cpp/#recipe_1","title":"Recipe","text":"recipe.yaml
    context:\n  version: \"24.01.0\"\n\npackage:\n  name: poppler\n  version: ${{ version }}\n\nsource:\n  url: https://poppler.freedesktop.org/poppler-${{ version }}.tar.xz\n  sha256: c7def693a7a492830f49d497a80cc6b9c85cb57b15e9be2d2d615153b79cae08\n\nbuild:\n  script: poppler-build.sh\n\nrequirements:\n  build:\n    - ${{ compiler('c') }} # (1)!\n    - ${{ compiler('cxx') }}\n    - pkg-config\n    - cmake\n    - ninja\n  host:\n    - cairo # (2)!\n    - fontconfig\n    - freetype\n    - glib\n    - libboost-headers\n    - libjpeg-turbo\n    - lcms2\n    - libiconv\n    - libpng\n    - libtiff\n    - openjpeg\n    - zlib\n\ntests:\n  - script:\n      - pdfinfo -listenc  # (3)!\n      - pdfunite --help\n      - pdftocairo --help\n
    1. The compiler jinja function to get the correct compiler for C and C++ on the build system.
    2. These are all the dependencies that the library links against.
3. The script test simply executes some of the installed tools to check that they work. Tests can be as complex as you want and run in bash or cmd.exe.
    "},{"location":"tutorials/cpp/#external-build-script","title":"External Build Script","text":"

We've defined an external build script in the recipe. rattler-build searches for this file next to the recipe under the given file name; if no script is specified, the default names build.sh on Unix and build.bat on Windows are searched for.

    poppler-build.sh
    #! /bin/bash\n\nextra_cmake_args=(\n    -GNinja\n    -DCMAKE_INSTALL_LIBDIR=lib\n    -DENABLE_UNSTABLE_API_ABI_HEADERS=ON\n    -DENABLE_GPGME=OFF\n    -DENABLE_LIBCURL=OFF\n    -DENABLE_LIBOPENJPEG=openjpeg2\n    -DENABLE_QT6=OFF\n    -DENABLE_QT5=OFF\n    -DENABLE_NSS3=OFF\n)\n\nmkdir build && cd build\n\ncmake ${CMAKE_ARGS} \"${extra_cmake_args[@]}\" \\\n    -DCMAKE_PREFIX_PATH=$PREFIX \\\n    -DCMAKE_INSTALL_PREFIX=$PREFIX \\\n    -DTIFF_INCLUDE_DIR=$PREFIX/include \\\n    $SRC_DIR\n\nninja\n\n# The `install` command will take care of copying the files to the right place\nninja install\n
    "},{"location":"tutorials/cpp/#parsing-the-rattler-build-build-output","title":"Parsing the rattler-build build Output","text":"

    When running the rattler-build command, you might notice some interesting information in the output. Our package will have some run dependencies, even if we didn't specify any.

    These come from the run-exports of the packages listed in the host section of the recipe. This is indicated by \"RE of [host: package]\" in the output.

    For example, libcurl specifies that if you depend on it in the host section, you should also depend on it during runtime with specific version ranges. This ensures proper linking to shared libraries.

    Run dependencies:\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u252c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n\u2502 Name                  \u2506 Spec                                         \u2502\n\u255e\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2561\n\u2502 libcurl               \u2506 >=8.5.0,<9.0a0 (RE of [host: libcurl])       \u2502\n\u2502 fontconfig            \u2506 >=2.14.2,<3.0a0 (RE of [host: fontconfig])   \u2502\n\u2502 fonts-conda-ecosystem \u2506 (RE of [host: fontconfig])                   \u2502\n\u2502 lcms2                 \u2506 >=2.16,<3.0a0 (RE of [host: lcms2])          \u2502\n\u2502 gettext               \u2506 >=0.21.1,<1.0a0 (RE of [host: gettext])      \u2502\n\u2502 freetype              \u2506 >=2.12.1,<3.0a0 (RE of [host: freetype])     \u2502\n\u2502 openjpeg              \u2506 >=2.5.0,<3.0a0 (RE of [host: openjpeg])      \u2502\n\u2502 libiconv              \u2506 >=1.17,<2.0a0 (RE of [host: libiconv])       \u2502\n\u2502 cairo                 \u2506 >=1.18.0,<2.0a0 (RE of [host: cairo])        \u2502\n\u2502 libpng                \u2506 >=1.6.42,<1.7.0a0 (RE of [host: libpng])     \u2502\n\u2502 libzlib               \u2506 >=1.2.13,<1.3.0a0 (RE of [host: zlib])       \u2502\n\u2502 libtiff               \u2506 >=4.6.0,<4.7.0a0 (RE of [host: libtiff])     \u2502\n\u2502 libjpeg-turbo         \u2506 >=3.0.0,<4.0a0 (RE of [host: libjpeg-turbo]) \u2502\n\u2502 libglib               \u2506 >=2.78.3,<3.0a0 (RE of [host: glib])         \u2502\n\u2502 libcxx                \u2506 >=16 (RE of [build: clangxx_osx-arm64])      \u2502\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2534\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n

    You can also see \"linking\" information in the output, for example on macOS:

    [lib/libpoppler-glib.8.26.0.dylib] links against:\n \u251c\u2500 @rpath/libgio-2.0.0.dylib\n \u251c\u2500 @rpath/libgobject-2.0.0.dylib\n \u251c\u2500 /usr/lib/libSystem.B.dylib\n \u251c\u2500 @rpath/libglib-2.0.0.dylib\n \u251c\u2500 @rpath/libpoppler.133.dylib\n \u251c\u2500 @rpath/libfreetype.6.dylib\n \u251c\u2500 @rpath/libc++.1.dylib\n \u251c\u2500 @rpath/libpoppler-glib.8.dylib\n \u2514\u2500 @rpath/libcairo.2.dylib\n

    rattler-build ensures that:

    1. All shared libraries linked against are present in the run dependencies. Missing libraries trigger an overlinking warning.
2. You don't require any packages in the host section that you are not linking against; unused host dependencies trigger an overdepending warning.
    "},{"location":"tutorials/python/","title":"Writing a Python package","text":"

    Writing a Python package is fairly straightforward, especially for \"Python-only\" packages. In the second example we will build a package for numpy which contains compiled code.

    "},{"location":"tutorials/python/#a-python-only-package","title":"A Python-only package","text":"

    The following recipe uses the noarch: python setting to build a noarch package that can be installed on any platform without modification. This is very handy for packages that are pure Python and do not contain any compiled extensions.

    Additionally, noarch: python packages work with a range of Python versions (contrary to packages with compiled extensions that are tied to a specific Python version).

    recipe.yaml
    context:\n  version: \"8.1.2\"\n\npackage:\n  name: ipywidgets\n  version: ${{ version }}\n\nsource:\n  url: https://pypi.io/packages/source/i/ipywidgets/ipywidgets-${{ version }}.tar.gz\n  sha256: d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9\n\nbuild:\n  noarch: python # (1)!\n  script: pip install . -v\n\nrequirements:\n  # note that there is no build section\n  host:\n    - pip\n    - python >=3.7\n    - setuptools\n    - wheel\n  run:\n    - comm >=0.1.3\n    - ipython >=6.1.0\n    - jupyterlab_widgets >=3.0.10,<3.1.0\n    - python >=3.7\n    - traitlets >=4.3.1\n    - widgetsnbextension >=4.0.10,<4.1.0\n\ntests:\n  - python:\n      imports:\n        - ipywidgets # (2)!\n\nabout:\n  homepage: https://github.com/ipython/ipywidgets\n  license: BSD-3-Clause\n  license_file: LICENSE\n  summary: Jupyter Interactive Widgets\n  description: |\n    ipywidgets are interactive HTML widgets for Jupyter notebooks and the IPython kernel.\n  documentation: https://ipywidgets.readthedocs.io/en/latest/\n
1. The noarch: python line tells rattler-build that this package is pure Python and can be one-size-fits-all. noarch packages can be installed on any platform without modification, which is very handy.
    2. The imports section in the tests is used to check that the package is installed correctly and can be imported.
    "},{"location":"tutorials/python/#running-the-recipe","title":"Running the recipe","text":"

    To build this recipe, simply run:

    rattler-build build --recipe ./ipywidgets\n
    "},{"location":"tutorials/python/#a-python-package-with-compiled-extensions","title":"A Python package with compiled extensions","text":"

We will build a package for numpy – which contains compiled code. Since compiled code is Python version-specific, we need to specify the Python version explicitly. The best way to do this is with a \"variant_config.yaml\" file:

    variant_config.yaml
    python:\n  - 3.11\n  - 3.12\n

    This will replace any python found in the recipe with the versions specified in the variant_config.yaml file.

    recipe.yaml
    context:\n  version: 1.26.4\n\npackage:\n  name: numpy\n  version: ${{ version }}\n\nsource:\n  - url: https://github.com/numpy/numpy/releases/download/v${{ version }}/numpy-${{ version }}.tar.gz\n    sha256: 2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010\n\nbuild:\n  python:\n    entry_points:\n      - f2py = numpy.f2py.f2py2e:main  # [win]\n\nrequirements:\n  build:\n    - ${{ compiler('c') }}\n    - ${{ compiler('cxx') }}\n  host:\n    # note: variant is injected here!\n    - python\n    - pip\n    - meson-python\n    - ninja\n    - pkg-config\n    - python-build\n    - cython\n    - libblas\n    - libcblas\n    - liblapack\n  run:\n    - python\n  run_exports:\n    - ${{ pin_subpackage(\"numpy\") }}\n\ntests:\n  - python:\n      imports:\n        - numpy\n        - numpy.array_api\n        - numpy.array_api.linalg\n        - numpy.ctypeslib\n\n  - script:\n    - f2py -h\n\nabout:\n  homepage: http://numpy.org/\n  license: BSD-3-Clause\n  license_file: LICENSE.txt\n  summary: The fundamental package for scientific computing with Python.\n  documentation: https://numpy.org/doc/stable/\n  repository: https://github.com/numpy/numpy\n

    The build script for Unix:

    build.sh
    mkdir builddir\n\n$PYTHON -m build -w -n -x \\\n    -Cbuilddir=builddir \\\n    -Csetup-args=-Dblas=blas \\\n    -Csetup-args=-Dlapack=lapack\n\n$PYTHON -m pip install dist/numpy*.whl\n

    The build script for Windows:

    build.bat
    mkdir builddir\n\n%PYTHON% -m build -w -n -x ^\n    -Cbuilddir=builddir ^\n    -Csetup-args=-Dblas=blas ^\n    -Csetup-args=-Dlapack=lapack\nif %ERRORLEVEL% neq 0 exit 1\n\n:: `pip install dist\\numpy*.whl` does not work on windows,\n:: so use a loop; there's only one wheel in dist/ anyway\nfor /f %%f in ('dir /b /S .\\dist') do (\n    pip install %%f\n    if %ERRORLEVEL% neq 0 exit 1\n)\n
    "},{"location":"tutorials/python/#running-the-recipe_1","title":"Running the recipe","text":"

Running this recipe with the variant config file will build a total of 2 numpy packages:

    rattler-build build --recipe ./numpy \\\n  --variant-config ./numpy/variant_config.yaml\n

    At the beginning of the build process, rattler-build will print the following message to show you the variants it found:

    Found variants:\n\nnumpy-1.26.4-py311h5f8ada8_0\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u252c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n\u2502 Variant         \u2506 Version   \u2502\n\u255e\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2561\n\u2502 python          \u2506 3.11      \u2502\n\u2502 target_platform \u2506 osx-arm64 \u2502\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2534\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n\nnumpy-1.26.4-py312h440f24a_0\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u252c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\n\u2502 Variant         \u2506 Version   \u2502\n\u255e\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2561\n\u2502 python          \u2506 3.12      \u2502\n\u2502 target_platform \u2506 osx-arm64 \u2502\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2534\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\n
    "},{"location":"tutorials/rust/","title":"Building a Rust package","text":"

    We're using rattler-build to build a Rust package for the cargo-edit utility. This utility manages Cargo dependencies from the command line.

    To configure the Rust compiler, we add a variant_config.yaml file to the package:

    variant_config.yaml
    rust_compiler: rust\n

    This will tell rattler-build what to insert for the ${{ compiler('rust') }} Jinja function.

    Note

The ${{ compiler(...) }} functions are very useful in the context of cross-compilation. When the function is evaluated, it will insert the correct compiler (as selected with the variant config) as well as the target_platform. The \"rendered\" compiler will look like rust_linux-64 when you are targeting the linux-64 platform.

    You can read more about this in the cross-compilation section.

    recipe.yaml
    context:\n  version: \"0.11.9\"\n\npackage:\n  name: cargo-edit\n  version: ${{ version }}\n\nsource:\n  url: https://github.com/killercup/cargo-edit/archive/refs/tags/v${{ version }}.tar.gz\n  sha256: 46670295e2323fc2f826750cdcfb2692fbdbea87122fe530a07c50c8dba1d3d7\n\nbuild:\n  script:\n    - cargo-bundle-licenses --format yaml --output ${SRC_DIR}/THIRDPARTY.yml  # !(1)\n    - $BUILD_PREFIX/bin/cargo install --locked --bins --root ${PREFIX} --path .\n\nrequirements:\n  build:\n    - ${{ compiler('rust') }}\n    - cargo-bundle-licenses\n\ntests:\n  - script:\n      - cargo-upgrade --help # !(2)\n\nabout:\n  homepage: https://github.com/killercup/cargo-edit\n  license: MIT\n  license_file:\n    - LICENSE\n    - THIRDPARTY.yml\n  description: \"A utility for managing cargo dependencies from the command line.\"\n  summary: \"A utility for managing cargo dependencies from the command line.\"\n
    1. The cargo-bundle-licenses utility is used to bundle all the licenses of the dependencies into a THIRDPARTY.yml file. This file is then included in the package. You should always include this file in your package when you are redistributing it.
2. Scripts run in bash or cmd.exe test that the package was built correctly; an exit code of 0 is expected for the test to pass.

    To build this recipe, simply run:

    rattler-build build \\\n    --recipe ./cargo-edit/recipe.yaml \\\n    --variant-config ./cargo-edit/variant_config.yaml\n
    "}]} \ No newline at end of file diff --git a/dev/selectors/index.html b/dev/selectors/index.html index a7c5837fb..087231a47 100644 --- a/dev/selectors/index.html +++ b/dev/selectors/index.html @@ -442,26 +442,6 @@ -
[navigation, sitemap, and table-of-contents diff hunks omitted: page links renamed from recipe_file/ and available_jinja/ to reference/recipe_file/ and reference/jinja/, a new reference/cli/ entry added, and sitemap dates bumped from 2024-05-31 to 2024-06-03]

diff --git a/dev/tutorials/cpp/index.html b/dev/tutorials/cpp/index.html
@@ -905,207 +1107,254 @@

      Packaging a C++ package#

This tutorial will guide you through making a C++ package with rattler-build.


      Building a Header-only Library#

To build a package for the header-only library xtensor, you need to manage dependencies and ensure proper installation paths.

Key Steps#
1. Dependencies: Ensure cmake, ninja, and a compiler are available as dependencies.

2. CMake Installation Prefix: Use the CMAKE_INSTALL_PREFIX setting to instruct CMake to install the headers in the correct location.

3. Unix Systems: Follow the standard Unix prefix:

   $PREFIX/include
   $PREFIX/lib

4. Windows Systems: Use a Unix-like prefix, but nested in a Library directory:

   $PREFIX/Library/include
   $PREFIX/Library/lib
   Utilize the handy variables %LIBRARY_PREFIX% and %LIBRARY_BIN% to guide CMake to install the headers and libraries correctly (see the layout sketch below).

      This approach ensures that the headers and libraries are installed in the correct directories on both Unix and Windows systems.
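As a rough sketch, the install step should then produce a layout like the following. The paths are illustrative only, derived from the header and CMake files this recipe's tests check for:

# Unix
$PREFIX/include/xtensor/xarray.hpp
$PREFIX/share/cmake/xtensor/xtensorConfig.cmake

# Windows (CMAKE_INSTALL_PREFIX set to %LIBRARY_PREFIX%)
%LIBRARY_PREFIX%\include\xtensor\xarray.hpp
%LIBRARY_PREFIX%\share\cmake\xtensor\xtensorConfig.cmake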


      Recipe#

recipe.yaml
context:
  version: "0.24.6"

package:
  name: xtensor
  version: ${{ version }}

source:
  url: https://github.com/xtensor-stack/xtensor/archive/${{ version }}.tar.gz
  sha256: f87259b51aabafdd1183947747edfff4cff75d55375334f2e81cee6dc68ef655

build:
  number: 0
  script:
    - if: win # (1)!
      then: |
        cmake -GNinja \
            -D BUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% \
            %SRC_DIR%
        ninja install
      else: |
        cmake ${CMAKE_ARGS} -GNinja -DBUILD_TESTS=OFF \
              -DCMAKE_INSTALL_PREFIX=$PREFIX \
              $SRC_DIR
        ninja install

requirements:
  build:
    - ${{ compiler('cxx') }} # (2)!
    - cmake
    - ninja
  host:
    - xtl >=0.7,<0.8
  run:
    - xtl >=0.7,<0.8
  run_constraints: # (3)!
    - xsimd >=8.0.3,<10

tests:
  - package_contents:
      include: # (4)!
        - xtensor/xarray.hpp
      files: # (5)!
        - share/cmake/xtensor/xtensorConfig.cmake
        - share/cmake/xtensor/xtensorConfigVersion.cmake

about:
  homepage: https://github.com/xtensor-stack/xtensor
  license: BSD-3-Clause
  license_file: LICENSE
  summary: The C++ tensor algebra library
  description: Multi dimensional arrays with broadcasting and lazy computing
  documentation: https://xtensor.readthedocs.io
  repository: https://github.com/xtensor-stack/xtensor

extra:
  recipe-maintainers:
    - some-maintainer
1. The if: condition allows you to switch the behavior of the build based on checks such as the operating system.
2. The compiler function is used to get the C++ compiler for the build system.
3. The run_constraints section specifies the version range of a package that the built package can run "with", but which the package does not itself depend on.
4. The include section specifies a header file to be tested for existence.
5. The files section specifies files to be tested for existence, using glob patterns.
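Since glob patterns are allowed in the files check, the two .cmake entries above could also be written as a single pattern. The following is a sketch, not part of the recipe above:

tests:
  - package_contents:
      files:
        # matches both xtensorConfig.cmake and xtensorConfigVersion.cmake
        - share/cmake/xtensor/*.cmake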

Building a C++ application#

In this example, we'll build poppler, a C++ application for manipulating PDF files from the command line. The final package will install several tools into the bin/ folder. We'll use an external build script and run the installed tools as real commands in the tests.

Key Steps#

1. Dependencies:
   • Build Dependencies: These are necessary for the build process, including cmake, ninja, and pkg-config.
   • Host Dependencies: These are the libraries poppler links against, such as cairo, fontconfig, freetype, glib, and others.

2. Compiler Setup: We use the compiler function to obtain the appropriate C and C++ compilers.

3. Build Script: The build.script field points to an external script (poppler-build.sh) which contains the build commands.

4. Testing: Simple tests verify that the installed tools (pdfinfo, pdfunite, pdftocairo) work correctly by running them and expecting an exit code of 0.

      Recipe#

recipe.yaml
context:
  version: "24.01.0"

package:
  name: poppler
  version: ${{ version }}

source:
  url: https://poppler.freedesktop.org/poppler-${{ version }}.tar.xz
  sha256: c7def693a7a492830f49d497a80cc6b9c85cb57b15e9be2d2d615153b79cae08

build:
  script: poppler-build.sh

requirements:
  build:
    - ${{ compiler('c') }} # (1)!
    - ${{ compiler('cxx') }}
    - pkg-config
    - cmake
    - ninja
  host:
    - cairo # (2)!
    - fontconfig
    - freetype
    - glib
    - libboost-headers
    - libjpeg-turbo
    - lcms2
    - libiconv
    - libpng
    - libtiff
    - openjpeg
    - zlib

tests:
  - script:
      - pdfinfo -listenc  # (3)!
      - pdfunite --help
      - pdftocairo --help
1. The compiler Jinja function selects the correct C and C++ compilers for the build system.
2. These are all the dependencies that the library links against.
3. The script test executes some of the installed tools to check that they work. Such tests can be as complex as you like, and they run under bash or cmd.exe.
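A slightly richer script test could also inspect a tool's output instead of only its exit code. The sketch below is an illustration, not part of the recipe; it runs under bash and assumes UTF-8 appears in pdfinfo's encoding list:

tests:
  - script:
      # assumption: `pdfinfo -listenc` includes UTF-8 among its encodings
      - pdfinfo -listenc | grep UTF-8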

      External Build Script#

We've defined an external build script in the recipe. It is looked up next to the recipe under the given file name; if no name is given, the default build.sh (on Unix) or build.bat (on Windows) is searched for.

poppler-build.sh
#! /bin/bash

extra_cmake_args=(
    -GNinja
    -DCMAKE_INSTALL_LIBDIR=lib
    -DENABLE_UNSTABLE_API_ABI_HEADERS=ON
    -DENABLE_GPGME=OFF
    -DENABLE_LIBCURL=OFF
    -DENABLE_LIBOPENJPEG=openjpeg2
    -DENABLE_QT6=OFF
    -DENABLE_QT5=OFF
    -DENABLE_NSS3=OFF
)

mkdir build && cd build

cmake ${CMAKE_ARGS} "${extra_cmake_args[@]}" \
    -DCMAKE_PREFIX_PATH=$PREFIX \
    -DCMAKE_INSTALL_PREFIX=$PREFIX \
    -DTIFF_INCLUDE_DIR=$PREFIX/include \
    $SRC_DIR

ninja

# The `install` command will take care of copying the files to the right place
ninja install

      Parsing the rattler-build build Output#

When running the rattler-build command, you might notice some interesting information in the output. Our package will have some run dependencies, even if we didn't specify any.

These come from the run-exports of the packages listed in the host section of the recipe. This is indicated by "RE of [host: package]" in the output.

For example, libcurl specifies that if you depend on it in the host section, you should also depend on it during runtime with specific version ranges. This ensures proper linking to shared libraries.
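To illustrate, a library declares this in its own recipe under run_exports. The snippet below is a hedged sketch, not taken from libcurl's actual recipe, and the exact pinning helper and its arguments may differ between rattler-build versions:

requirements:
  run_exports:
    # any package that lists libcurl in `host` automatically gains a
    # matching runtime dependency such as `>=8.5.0,<9.0a0`
    - ${{ pin_subpackage("libcurl", max_pin="x") }}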

Run dependencies:
╭───────────────────────┬──────────────────────────────────────────────╮
│ Name                  ┆ Spec                                         │
╞═══════════════════════╪══════════════════════════════════════════════╡
│ libcurl               ┆ >=8.5.0,<9.0a0 (RE of [host: libcurl])       │
│ fontconfig            ┆ >=2.14.2,<3.0a0 (RE of [host: fontconfig])   │
│ fonts-conda-ecosystem ┆ (RE of [host: fontconfig])                   │
│ lcms2                 ┆ >=2.16,<3.0a0 (RE of [host: lcms2])          │
│ gettext               ┆ >=0.21.1,<1.0a0 (RE of [host: gettext])      │
│ freetype              ┆ >=2.12.1,<3.0a0 (RE of [host: freetype])     │
│ openjpeg              ┆ >=2.5.0,<3.0a0 (RE of [host: openjpeg])      │
│ libiconv              ┆ >=1.17,<2.0a0 (RE of [host: libiconv])       │
│ cairo                 ┆ >=1.18.0,<2.0a0 (RE of [host: cairo])        │
│ libpng                ┆ >=1.6.42,<1.7.0a0 (RE of [host: libpng])     │
│ libzlib               ┆ >=1.2.13,<1.3.0a0 (RE of [host: zlib])       │
│ libtiff               ┆ >=4.6.0,<4.7.0a0 (RE of [host: libtiff])     │
│ libjpeg-turbo         ┆ >=3.0.0,<4.0a0 (RE of [host: libjpeg-turbo]) │
│ libglib               ┆ >=2.78.3,<3.0a0 (RE of [host: glib])         │
│ libcxx                ┆ >=16 (RE of [build: clangxx_osx-arm64])      │
╰───────────────────────┴──────────────────────────────────────────────╯
      -

      We can also observe some "linking" information in the output, for example on -macOS:

      -
      [lib/libpoppler-glib.8.26.0.dylib] links against:
      - ├─ @rpath/libgio-2.0.0.dylib
      - ├─ @rpath/libgobject-2.0.0.dylib
      - ├─ /usr/lib/libSystem.B.dylib
      - ├─ @rpath/libglib-2.0.0.dylib
      - ├─ @rpath/libpoppler.133.dylib
      - ├─ @rpath/libfreetype.6.dylib
      - ├─ @rpath/libc++.1.dylib
      - ├─ @rpath/libpoppler-glib.8.dylib
      - └─ @rpath/libcairo.2.dylib
      +

      You can also see "linking" information in the output, for example on macOS:

[lib/libpoppler-glib.8.26.0.dylib] links against:
 ├─ @rpath/libgio-2.0.0.dylib
 ├─ @rpath/libgobject-2.0.0.dylib
 ├─ /usr/lib/libSystem.B.dylib
 ├─ @rpath/libglib-2.0.0.dylib
 ├─ @rpath/libpoppler.133.dylib
 ├─ @rpath/libfreetype.6.dylib
 ├─ @rpath/libc++.1.dylib
 ├─ @rpath/libpoppler-glib.8.dylib
 └─ @rpath/libcairo.2.dylib
rattler-build ensures that:

1. All shared libraries linked against are present in the run dependencies. Linking against a library that is not listed there triggers an overlinking warning.
2. You don't require any packages in host that you are not linking against. Requiring a host package without linking against it triggers an overdepending warning.
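If such a warning is expected, for example for a system library that is always available, these checks can usually be relaxed in the recipe's build section. The following is a hedged sketch; the exact key names (such as missing_dso_allowlist under dynamic_linking) should be verified against the current recipe reference:

build:
  dynamic_linking:
    # allow linking against a system library that is not a run dependency
    missing_dso_allowlist:
      - /usr/lib/libSystem.B.dylib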
      diff --git a/dev/tutorials/python/index.html b/dev/tutorials/python/index.html index 8180f933e..36f39c4dd 100644 --- a/dev/tutorials/python/index.html +++ b/dev/tutorials/python/index.html @@ -400,6 +400,30 @@